Commit 893631a8 authored by Olivier Bertrand's avatar Olivier Bertrand


All the latest changes made in the ob-10.0 branch, including changes to the line endings of some test files
parent 83ca074c
......@@ -245,6 +245,11 @@ storage/mroonga/vendor/groonga/src/suggest/groonga-suggest-create-dataset
*.ko
*.obj
*.elf
*.exp
*.manifest
*.dep
*.idb
*.res
# Precompiled Headers
*.gch
......@@ -293,7 +298,6 @@ storage/mroonga/vendor/groonga/src/suggest/groonga-suggest-create-dataset
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
[Rr]eleaseWithDebInfo/
x64/
x86/
build/
......
# Set the default behavior, in case people don't have core.autocrlf set.
* text=auto
# Explicitly declare text files you want to always be normalized and converted
# to native line endings on checkout.
*.c text
*.cc text
*.cpp text
*.h text
*.test text
# Declare files that will always have LF line endings on checkout.
*.result text eol=lf
mysql-test/connect/std_data/*.txt text eol=lf
mysql-test/connect/std_data/*.dat text eol=lf
# Denote all files that are truly binary and should not be modified.
*.png binary
*.jpg binary
*.c diff=cpp
*.h diff=cpp
*.cc diff=cpp
*.ic diff=cpp
*.cpp diff=cpp
# Edited by Olivier Bertrand
*-t
*.a
*.ctest
*.o
*.reject
*.so
*.so.*
*.spec
*~
*.bak
*.log
*.cmake
*.tgz
*.msg
.*.swp
*.ninja
.ninja_*
.gdb_history
CMakeFiles/
connect.dir/
connect.dir-Copie/
Debug/
MinSizeRel/
Release/
RelWithDebInfo/
# C and C++
# Compiled Object files
*.slo
*.lo
*.o
*.ko
*.obj
*.elf
*.exp
*.manifest
*.dep
*.idb
*.res
# Precompiled Headers
*.gch
*.pch
# Compiled Static libraries
*.lib
*.a
*.la
*.lai
*.lo
# Compiled Dynamic libraries
*.so
*.so.*
*.dylib
*.dll
# Executables
*.exe
*.out
*.app
*.i*86
*.x86_64
*.hex
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates
*.ncb
*.sln
*.vcproj
*.vcproj.*
*.vcproj.*.*
*.vcproj.*.*.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
# Roslyn cache directories
*.ide/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
#NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opensdf
*.sdf
*.cachefile
# Visual Studio profiler
*.psess
*.vsp
*.vspx
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# NCrunch
_NCrunch_*
.*crunch*.local.xml
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# TODO: Comment the next line if you want to checkin your web deploy settings
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# If using the old MSBuild-Integrated Package Restore, uncomment this:
#!**/packages/repositories.config
# Windows Azure Build Output
csx/
*.build.csdef
# Windows Store app package directory
AppPackages/
# Others
# sql/
*.Cache
ClientBin/
[Ss]tyle[Cc]op.*
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.pfx
*.publishsettings
node_modules/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
# SQL Server files
*.mdf
*.ldf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
# Microsoft Fakes
FakesAssemblies/
......@@ -74,16 +74,28 @@ typedef struct _dbfheader {
//uchar Dbfox :4; /* FoxPro if equal to 3 */
uchar Version; /* Version information flags */
char Filedate[3]; /* date, YYMMDD, binary. YY=year-1900 */
uint Records; /* records in the file */
ushort Headlen; /* bytes in the header */
ushort Reclen; /* bytes in a record */
ushort Fields; /* Reserved but used to store fields */
private:
/* The following four members are stored in little-endian format on disk */
char m_RecordsBuf[4]; /* records in the file */
char m_HeadlenBuf[2]; /* bytes in the header */
char m_ReclenBuf[2]; /* bytes in a record */
char m_FieldsBuf[2]; /* Reserved but used to store fields */
public:
char Incompleteflag; /* 01 if incomplete, else 00 */
char Encryptflag; /* 01 if encrypted, else 00 */
char Reserved2[12]; /* for LAN use */
char Mdxflag; /* 01 if production .mdx, else 00 */
char Language; /* Codepage */
char Reserved3[2];
uint Records(void) const {return uint4korr(m_RecordsBuf);}
ushort Headlen(void) const {return uint2korr(m_HeadlenBuf);}
ushort Reclen(void) const {return uint2korr(m_ReclenBuf);}
ushort Fields(void) const {return uint2korr(m_FieldsBuf);}
void SetHeadlen(ushort num) {int2store(m_HeadlenBuf, num);}
void SetReclen(ushort num) {int2store(m_ReclenBuf, num);}
void SetFields(ushort num) {int2store(m_FieldsBuf, num);}
} DBFHEADER;
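For reference, the uint2korr/uint4korr and int2store macros used by these accessors read and write little-endian byte sequences independently of the host byte order. A minimal stand-alone sketch of what they do (not the actual MariaDB definitions):

#include <cstdint>

// Read a 2-byte little-endian value byte by byte, so the result is the
// same on little- and big-endian hosts (what uint2korr provides).
static inline uint16_t le_read16(const char *p) {
  return (uint16_t)(unsigned char)p[0] |
         ((uint16_t)(unsigned char)p[1] << 8);
}

// Store a 16-bit value as two little-endian bytes (int2store equivalent).
static inline void le_store16(char *p, uint16_t v) {
  p[0] = (char)(v & 0xFF);
  p[1] = (char)(v >> 8);
}

Keeping the on-disk fields as raw char buffers and decoding them through such helpers is what lets the DBF header be read correctly on big-endian platforms.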
/****************************************************************************/
......@@ -115,7 +127,6 @@ typedef struct _descriptor {
/* Side effects: */
/* Moves file pointer to byte 32; fills buffer at buf with */
/* first 32 bytes of file. */
/* Converts numeric values to platform byte ordering (LE in file) */
/****************************************************************************/
static int dbfhead(PGLOBAL g, FILE *file, PSZ fn, DBFHEADER *buf)
{
......@@ -143,13 +154,8 @@ static int dbfhead(PGLOBAL g, FILE *file, PSZ fn, DBFHEADER *buf)
} else
strcpy(g->Message, MSG(DBASE_FILE));
// Convert numeric fields to have them in platform byte ordering
buf->Records = uint4korr(&buf->Records);
buf->Headlen = uint2korr(&buf->Headlen);
buf->Reclen = uint2korr(&buf->Reclen);
// Check last byte(s) of header
if (fseek(file, buf->Headlen - dbc, SEEK_SET) != 0) {
if (fseek(file, buf->Headlen() - dbc, SEEK_SET) != 0) {
sprintf(g->Message, MSG(BAD_HEADER), fn);
return RC_FX;
} // endif fseek
......@@ -169,7 +175,7 @@ static int dbfhead(PGLOBAL g, FILE *file, PSZ fn, DBFHEADER *buf)
} // endif endmark
// Calculate here the number of fields while we have the dbc info
buf->Fields = (buf->Headlen - dbc - 1) / 32;
buf->SetFields((buf->Headlen() - dbc - 1) / 32);
fseek(file, HEADLEN, SEEK_SET);
return rc;
} // end of dbfhead
......@@ -225,7 +231,7 @@ PQRYRES DBFColumns(PGLOBAL g, char *dp, const char *fn, bool info)
/************************************************************************/
/* Allocate the structures used to refer to the result set. */
/************************************************************************/
fields = mainhead.Fields;
fields = mainhead.Fields();
} else
fields = 0;
......@@ -242,11 +248,11 @@ PQRYRES DBFColumns(PGLOBAL g, char *dp, const char *fn, bool info)
if (trace) {
htrc("Structure of %s\n", filename);
htrc("headlen=%hd reclen=%hd degree=%d\n",
mainhead.Headlen, mainhead.Reclen, fields);
mainhead.Headlen(), mainhead.Reclen(), fields);
htrc("flags(iem)=%d,%d,%d cp=%d\n", mainhead.Incompleteflag,
mainhead.Encryptflag, mainhead.Mdxflag, mainhead.Language);
htrc("%hd records, last changed %02d/%02d/%d\n",
mainhead.Records, mainhead.Filedate[1], mainhead.Filedate[2],
mainhead.Records(), mainhead.Filedate[1], mainhead.Filedate[2],
mainhead.Filedate[0] + (mainhead.Filedate[0] <= 30) ? 2000 : 1900);
htrc("Field Type Offset Len Dec Set Mdx\n");
} // endif trace
......@@ -404,13 +410,13 @@ int DBFBASE::ScanHeader(PGLOBAL g, PSZ fname, int lrecl, char *defpath)
} else if (rc == RC_FX)
return -1;
if ((int)header.Reclen != lrecl) {
sprintf(g->Message, MSG(BAD_LRECL), lrecl, header.Reclen);
if ((int)header.Reclen() != lrecl) {
sprintf(g->Message, MSG(BAD_LRECL), lrecl, header.Reclen());
return -1;
} // endif Lrecl
Records = (int)header.Records;
return (int)header.Headlen;
Records = (int)header.Records();
return (int)header.Headlen();
} // end of ScanHeader
/* ---------------------------- Class DBFFAM ------------------------------ */
......@@ -571,8 +577,8 @@ bool DBFFAM::AllocateBuffer(PGLOBAL g)
header->Filedate[0] = datm->tm_year - 100;
header->Filedate[1] = datm->tm_mon + 1;
header->Filedate[2] = datm->tm_mday;
int2store(&header->Headlen, hlen);
int2store(&header->Reclen, reclen);
header->SetHeadlen((ushort)hlen);
header->SetReclen((ushort)reclen);
descp = (DESCRIPTOR*)header;
// Currently only standard Xbase types are supported
......@@ -633,13 +639,13 @@ bool DBFFAM::AllocateBuffer(PGLOBAL g)
DBFHEADER header;
if ((rc = dbfhead(g, Stream, Tdbp->GetFile(g), &header)) == RC_OK) {
if (Lrecl != (int)header.Reclen) {
sprintf(g->Message, MSG(BAD_LRECL), Lrecl, header.Reclen);
if (Lrecl != (int)header.Reclen()) {
sprintf(g->Message, MSG(BAD_LRECL), Lrecl, header.Reclen());
return true;
} // endif Lrecl
Records = (int)header.Records;
Headlen = (int)header.Headlen;
Records = (int)header.Records();
Headlen = (int)header.Headlen();
} else if (rc == RC_NF) {
Records = 0;
Headlen = 0;
......@@ -869,12 +875,14 @@ void DBFFAM::CloseTableFile(PGLOBAL g, bool abort)
if (n > Records) {
// Update the number of rows in the file header
char filename[_MAX_PATH], nRecords[4];
char filename[_MAX_PATH];
int4store(&nRecords, n);
PlugSetPath(filename, To_File, Tdbp->GetPath());
if ((Stream= global_fopen(g, MSGID_OPEN_MODE_STRERROR, filename, "r+b")))
{
char nRecords[4];
int4store(nRecords, n);
fseek(Stream, 4, SEEK_SET); // Get header.Records position
fwrite(nRecords, sizeof(nRecords), 1, Stream);
fclose(Stream);
......@@ -951,13 +959,13 @@ bool DBMFAM::AllocateBuffer(PGLOBAL g)
/************************************************************************/
DBFHEADER *hp = (DBFHEADER*)Memory;
if (Lrecl != (int)uint2korr(&hp->Reclen)) {
sprintf(g->Message, MSG(BAD_LRECL), Lrecl, uint2korr(&hp->Reclen));
if (Lrecl != (int)hp->Reclen()) {
sprintf(g->Message, MSG(BAD_LRECL), Lrecl, hp->Reclen());
return true;
} // endif Lrecl
Records = (int)uint4korr(&hp->Records);
Headlen = (int)uint2korr(&hp->Headlen);
Records = (int)hp->Records();
Headlen = (int)hp->Headlen();
} // endif Headlen
/**************************************************************************/
......@@ -1011,7 +1019,7 @@ int DBMFAM::ReadBuffer(PGLOBAL g)
/* Data Base delete line routine for DBF access methods. */
/* Deleted lines are just flagged in the first buffer character. */
/****************************************************************************/
int DBMFAM::DeleteRecords(PGLOBAL, int irc)
int DBMFAM::DeleteRecords(PGLOBAL g, int irc)
{
if (irc == RC_OK)
*Fpos = '*';
......
......@@ -46,6 +46,7 @@
#include "plgdbsem.h"
#include "filamfix.h"
#include "tabdos.h"
#include "tabfix.h"
#include "osutil.h"
#ifndef INVALID_SET_FILE_POINTER
......@@ -133,18 +134,35 @@ bool FIXFAM::AllocateBuffer(PGLOBAL g)
if (Tdbp->GetFtype() == RECFM_BIN) {
// The buffer must be prepared depending on column types
int n = 0;
bool b = false;
PDOSDEF defp = (PDOSDEF)Tdbp->GetDef();
PCOLDEF cdp;
// PCOLDEF cdp;
PBINCOL colp;
// Prepare the first line of the buffer
memset(To_Buf, 0, Buflen);
#if 0
for (cdp = defp->GetCols(); cdp; cdp = cdp->GetNext()) {
if (IsTypeNum(cdp->GetType()))
if (!IsTypeNum(cdp->GetType())) {
memset(To_Buf + cdp->GetOffset(), ' ', cdp->GetClen());
b = true;
} // endif not num
n = MY_MAX(n, cdp->GetPoff() + cdp->GetClen());
n = MY_MAX(n, cdp->GetOffset() + cdp->GetClen());
} // endfor cdp
#endif // 0
for (colp = (PBINCOL)Tdbp->GetColumns(); colp;
colp = (PBINCOL)colp->GetNext())
if (!colp->IsSpecial()) {
if (!IsTypeNum(colp->GetResultType())) {
memset(To_Buf + colp->GetDeplac(), ' ', colp->GetLength());
b = true;
} // endif not num
n = MY_MAX(n, colp->GetDeplac() + colp->GetFileSize());
} // endif !special
// We do this for binary table because the lrecl can have been
// specified with additional space to include line ending.
......@@ -156,6 +174,7 @@ bool FIXFAM::AllocateBuffer(PGLOBAL g)
} // endif n
if (b)
// Now repeat this for the whole buffer
for (int len = Lrecl; len <= Buflen - Lrecl; len += Lrecl)
memcpy(To_Buf + len, To_Buf, Lrecl);
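The intent of the loop above is to build one fully prepared record image at the start of To_Buf and then replicate it over the rest of the block buffer. A simplified sketch of that replication step (hypothetical helper, not part of the patch):

#include <cstring>

// Copy the prepared template record at buf[0..lrecl) into every other
// record slot of the block buffer.
static void prime_buffer(char *buf, int buflen, int lrecl) {
  for (int off = lrecl; off + lrecl <= buflen; off += lrecl)
    memcpy(buf + off, buf, lrecl);
}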
......
......@@ -118,7 +118,7 @@ extern "C" {
/* Static variables */
/***********************************************************************/
#if defined(STORAGE)
char sys_stamp[4] = SYS_STAMP;
char sys_stamp[5] = SYS_STAMP;
#else
extern char sys_stamp[];
#endif
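The array size changes from 4 to 5 presumably because SYS_STAMP expands to a four-character string literal, which needs a fifth byte for its terminating NUL in C++. An illustration with a hypothetical literal (the real SYS_STAMP value is not shown here):

// "XXXX" stands in for the real SYS_STAMP literal.
char stamp_ok[5]  = "XXXX";   // four characters plus the trailing '\0'
// char stamp_bad[4] = "XXXX"; // ill-formed in C++: no room for the '\0'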
......
......@@ -153,6 +153,7 @@
#endif // LIBXML2_SUPPORT
#include "taboccur.h"
#include "tabpivot.h"
#include "tabfix.h"
#define my_strupr(p) my_caseup_str(default_charset_info, (p));
#define my_strlwr(p) my_casedn_str(default_charset_info, (p));
......@@ -656,6 +657,7 @@ static int connect_init_func(void *p)
sql_print_information("connect_init: hton=%p", p);
DTVAL::SetTimeShift(); // Initialize time zone shift once for all
BINCOL::SetEndian(); // Initialize host endian setting
DBUG_RETURN(0);
} // end of connect_init_func
......
......@@ -410,7 +410,7 @@ void Json_Array_Add_deinit(UDF_INIT* initid)
} // end of Json_Array_Add_deinit
/***********************************************************************/
/* Add values to a Json array. */
/* Delete a value from a Json array. */
/***********************************************************************/
my_bool Json_Array_Delete_init(UDF_INIT *initid, UDF_ARGS *args, char *message)
{
......@@ -451,7 +451,7 @@ char *Json_Array_Delete(UDF_INIT *initid, UDF_ARGS *args, char *result,
} else {
n = *(int*)args->args[1];
arp = jvp->GetArray();
arp->DeleteValue(n - 1);
arp->DeleteValue(n);
arp->InitArray(g);
if (!(str = Serialize(g, arp, NULL, 0))) {
......
......@@ -99,6 +99,26 @@ extern "C" HINSTANCE s_hModule; // Saved module handle
PQRYRES OEMColumns(PGLOBAL g, PTOS topt, char *tab, char *db, bool info);
/***********************************************************************/
/* Get the plugin directory. */
/***********************************************************************/
char *GetPluginDir(void)
{
char *plugin_dir;
#if defined(_WIN64)
plugin_dir = (char *)GetProcAddress(GetModuleHandle(NULL),
"?opt_plugin_dir@@3PADEA");
#elif defined(_WIN32)
plugin_dir = (char*)GetProcAddress(GetModuleHandle(NULL),
"?opt_plugin_dir@@3PADA");
#else
plugin_dir = opt_plugin_dir;
#endif
return plugin_dir;
} // end of GetPluginDir
/***********************************************************************/
/* Get a unique enum table type ID. */
/***********************************************************************/
......@@ -328,7 +348,7 @@ PQRYRES OEMColumns(PGLOBAL g, PTOS topt, char *tab, char *db, bool info)
{
typedef PQRYRES (__stdcall *XCOLDEF) (PGLOBAL, void*, char*, char*, bool);
const char *module, *subtype;
char c, getname[40] = "Col";
char c, soname[_MAX_PATH], getname[40] = "Col";
#if defined(WIN32)
HANDLE hdll; /* Handle to the external DLL */
#else // !WIN32
......@@ -343,6 +363,17 @@ PQRYRES OEMColumns(PGLOBAL g, PTOS topt, char *tab, char *db, bool info)
if (!module || !subtype)
return NULL;
/*********************************************************************/
/* Ensure that the .dll doesn't have a path. */
/* This is done to ensure that only approved dll from the system */
/* directories are used (to make this even remotely secure). */
/*********************************************************************/
if (check_valid_path(module, strlen(module))) {
strcpy(g->Message, "Module cannot contain a path");
return NULL;
} else
PlugSetPath(soname, module, GetPluginDir());
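A simplified illustration of the restriction described above (this is not MariaDB's check_valid_path, whose exact rules are defined elsewhere): any module name containing a directory separator is rejected, and the bare file name is then resolved inside the plugin directory.

#include <cstring>

// Hypothetical helper: true if the name tries to escape the plugin dir.
static bool module_has_path(const char *name) {
  return strchr(name, '/') != nullptr || strchr(name, '\\') != nullptr;
}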
// The exported name is always in uppercase
for (int i = 0; ; i++) {
c = subtype[i];
......@@ -352,11 +383,11 @@ PQRYRES OEMColumns(PGLOBAL g, PTOS topt, char *tab, char *db, bool info)
#if defined(WIN32)
// Load the Dll implementing the table
if (!(hdll = LoadLibrary(module))) {
if (!(hdll = LoadLibrary(soname))) {
char buf[256];
DWORD rc = GetLastError();
sprintf(g->Message, MSG(DLL_LOAD_ERROR), rc, module);
sprintf(g->Message, MSG(DLL_LOAD_ERROR), rc, soname);
FormatMessage(FORMAT_MESSAGE_FROM_SYSTEM |
FORMAT_MESSAGE_IGNORE_INSERTS, NULL, rc, 0,
(LPTSTR)buf, sizeof(buf), NULL);
......@@ -374,9 +405,9 @@ PQRYRES OEMColumns(PGLOBAL g, PTOS topt, char *tab, char *db, bool info)
const char *error = NULL;
// Load the desired shared library
if (!(hdll = dlopen(module, RTLD_LAZY))) {
if (!(hdll = dlopen(soname, RTLD_LAZY))) {
error = dlerror();
sprintf(g->Message, MSG(SHARED_LIB_ERR), module, SVP(error));
sprintf(g->Message, MSG(SHARED_LIB_ERR), soname, SVP(error));
return NULL;
} // endif Hdll
......
......@@ -14,11 +14,11 @@ SET time_zone='+00:00';
CREATE TABLE t1
(
fig INT(4) NOT NULL FIELD_FORMAT='C',
name CHAR(10) not null,
birth DATE NOT NULL,
id CHAR(5) NOT NULL FIELD_FORMAT='S',
name CHAR(10) NOT NULL,
birth DATE NOT NULL FIELD_FORMAT='L',
id CHAR(5) NOT NULL FIELD_FORMAT='L2',
salary DOUBLE(9,2) NOT NULL DEFAULT 0.00 FIELD_FORMAT='F',
dept INT(4) NOT NULL FIELD_FORMAT='S'
dept INT(4) NOT NULL FIELD_FORMAT='L2'
) ENGINE=CONNECT TABLE_TYPE=BIN BLOCK_SIZE=5 FILE_NAME='Testbal.dat';
SELECT * FROM t1;
fig name birth id salary dept
......@@ -41,11 +41,11 @@ DROP TABLE t1;
CREATE TABLE t1
(
fig INT(4) NOT NULL FIELD_FORMAT='C',
name CHAR(10) not null,
birth DATE NOT NULL,
id CHAR(5) NOT NULL FIELD_FORMAT='S',
name CHAR(10) NOT NULL,
birth DATE NOT NULL FIELD_FORMAT='L',
id CHAR(5) NOT NULL FIELD_FORMAT='L2',
salary DOUBLE(9,2) NOT NULL DEFAULT 0.00 FIELD_FORMAT='F',
dept INT(4) NOT NULL FIELD_FORMAT='S'
dept INT(4) NOT NULL FIELD_FORMAT='L2'
) ENGINE=CONNECT TABLE_TYPE=BIN READONLY=Yes FILE_NAME='Testbal.dat';
INSERT INTO t1 VALUES (7777,'BILL','1973-06-30',4444,5555.555,777);
ERROR HY000: Table 't1' is read only
......@@ -55,10 +55,10 @@ Table Create Table
t1 CREATE TABLE `t1` (
`fig` int(4) NOT NULL `FIELD_FORMAT`='C',
`name` char(10) NOT NULL,
`birth` date NOT NULL,
`id` char(5) NOT NULL `FIELD_FORMAT`='S',
`birth` date NOT NULL `FIELD_FORMAT`='L',
`id` char(5) NOT NULL `FIELD_FORMAT`='L2',
`salary` double(9,2) NOT NULL DEFAULT '0.00' `FIELD_FORMAT`='F',
`dept` int(4) NOT NULL `FIELD_FORMAT`='S'
`dept` int(4) NOT NULL `FIELD_FORMAT`='L2'
) ENGINE=CONNECT DEFAULT CHARSET=latin1 `TABLE_TYPE`=BIN `FILE_NAME`='Testbal.dat' `READONLY`=NO
INSERT INTO t1 VALUES (7777,'BILL','1973-06-30',4444,5555.555,777);
SELECT * FROM t1;
......@@ -74,10 +74,10 @@ Table Create Table
t1 CREATE TABLE `t1` (
`fig` int(4) NOT NULL `FIELD_FORMAT`='C',
`name` char(10) NOT NULL,
`birth` date NOT NULL,
`id` char(5) NOT NULL `FIELD_FORMAT`='S',
`birth` date NOT NULL `FIELD_FORMAT`='L',
`id` char(5) NOT NULL `FIELD_FORMAT`='L2',
`salary` double(9,2) NOT NULL DEFAULT '0.00' `FIELD_FORMAT`='F',
`dept` int(4) NOT NULL `FIELD_FORMAT`='S'
`dept` int(4) NOT NULL `FIELD_FORMAT`='L2'
) ENGINE=CONNECT DEFAULT CHARSET=latin1 `TABLE_TYPE`=BIN `FILE_NAME`='Testbal.dat' `READONLY`=YES
INSERT INTO t1 VALUES (7777,'BILL','1973-06-30',4444,5555.555,777);
ERROR HY000: Table 't1' is read only
......
......@@ -89,23 +89,23 @@ ISBN Language Subject AuthorFN AuthorLN Title Translation Translator Publisher L
UPDATE t1 SET AuthorFN = 'Philippe' WHERE AuthorLN = 'Knab';
SELECT * FROM t1 WHERE ISBN = '9782212090819';
ISBN Language Subject AuthorFN AuthorLN Title Translation Translator Publisher Location Year
9782212090819 fr applications Philippe Bernadac Construire une application XML NULL NULL Eyrolles Paris 1999
9782212090819 fr applications Franois Knab Construire une application XML NULL NULL Eyrolles Paris 1999
9782212090819 fr applications Jean-Christophe Bernadac Construire une application XML NULL NULL Eyrolles Paris 1999
9782212090819 fr applications Philippe Knab Construire une application XML NULL NULL Eyrolles Paris 1999
#
# To add an author a new table must be created
#
CREATE TABLE t2 (
FIRSTNAME CHAR(32),
LASTNAME CHAR(32))
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='biblio.json' OPTION_LIST='Object=[2]:AUTHOR';
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='biblio.json' OPTION_LIST='Object=[1]:AUTHOR';
SELECT * FROM t2;
FIRSTNAME LASTNAME
William J. Pardi
INSERT INTO t2 VALUES('Charles','Dickens');
SELECT * FROM t1;
ISBN Language Subject AuthorFN AuthorLN Title Translation Translator Publisher Location Year
9782212090819 fr applications Philippe Bernadac Construire une application XML NULL NULL Eyrolles Paris 1999
9782212090819 fr applications Franois Knab Construire une application XML NULL NULL Eyrolles Paris 1999
9782212090819 fr applications Jean-Christophe Bernadac Construire une application XML NULL NULL Eyrolles Paris 1999
9782212090819 fr applications Philippe Knab Construire une application XML NULL NULL Eyrolles Paris 1999
9782840825685 fr applications William J. Pardi XML en Action adapt de l'anglais par James Guerin Microsoft Press Paris 1999
9782840825685 fr applications Charles Dickens XML en Action adapt de l'anglais par James Guerin Microsoft Press Paris 1999
DROP TABLE t1;
......@@ -127,11 +127,11 @@ line
"SUBJECT": "applications",
"AUTHOR": [
{
"FIRSTNAME": "Philippe",
"FIRSTNAME": "Jean-Christophe",
"LASTNAME": "Bernadac"
},
{
"FIRSTNAME": "Franois",
"FIRSTNAME": "Philippe",
"LASTNAME": "Knab"
}
],
......@@ -252,9 +252,9 @@ DROP TABLE t1;
#
CREATE TABLE t2 (
WHO CHAR(12),
WEEK INT(2) FIELD_FORMAT='WEEK:[1]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[1]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[1]:EXPENSE:[X]:AMOUNT')
WEEK INT(2) FIELD_FORMAT='WEEK:[0]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[0]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[0]:EXPENSE:[X]:AMOUNT')
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='expense.json';
SELECT * FROM t2;
WHO WEEK WHAT AMOUNT
......@@ -268,9 +268,9 @@ Janet 3 Food 18.00
Janet 3 Beer 18.00
CREATE TABLE t3 (
WHO CHAR(12),
WEEK INT(2) FIELD_FORMAT='WEEK:[2]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[2]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[2]:EXPENSE:[X]:AMOUNT')
WEEK INT(2) FIELD_FORMAT='WEEK:[1]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[1]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[1]:EXPENSE:[X]:AMOUNT')
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='expense.json';
SELECT * FROM t3;
WHO WEEK WHAT AMOUNT
......@@ -284,9 +284,9 @@ Beth 4 Beer 15.00
Janet 4 Car 17.00
CREATE TABLE t4 (
WHO CHAR(12),
WEEK INT(2) FIELD_FORMAT='WEEK:[3]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[3]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[3]:EXPENSE:[X]:AMOUNT')
WEEK INT(2) FIELD_FORMAT='WEEK:[2]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[2]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[2]:EXPENSE:[X]:AMOUNT')
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='expense.json';
SELECT * FROM t4;
WHO WEEK WHAT AMOUNT
......
Warnings:
Warning 1105 No file name. Table will use t1.xml
SET NAMES utf8;
#
# Testing HTML like XML file
#
CREATE TABLE beers (
`Name` CHAR(16) FIELD_FORMAT='brandName',
`Origin` CHAR(16) FIELD_FORMAT='origin',
`Description` CHAR(32) FIELD_FORMAT='details')
ENGINE=CONNECT TABLE_TYPE=XML FILE_NAME='beers.xml'
TABNAME='table' OPTION_LIST='xmlsup=libxml2,rownode=tr,colnode=td';
SELECT * FROM beers;
Name Origin Description
Huntsman Bath, UK Wonderful hop, light alcohol
Tuborg Danmark In small bottles
DROP TABLE beers;
#
# Testing HTML file
#
CREATE TABLE coffee (
`Name` CHAR(16),
`Cups` INT(8),
`Type` CHAR(16),
`Sugar` CHAR(4))
ENGINE=CONNECT TABLE_TYPE=XML FILE_NAME='coffee.htm'
TABNAME='TABLE' HEADER=1 OPTION_LIST='xmlsup=libxml2,Coltype=HTML';
SELECT * FROM coffee;
Name Cups Type Sugar
T. Sexton 10 Espresso No
J. Dinnen 5 Decaf Yes
DROP TABLE coffee;
<?xml version="1.0"?>
<Beers>
<table>
<th><td>Name</td><td>Origin</td><td>Description</td></th>
<tr>
<td><brandName>Huntsman</brandName></td>
<td><origin>Bath, UK</origin></td>
<td><details>Wonderful hop, light alcohol</details></td>
</tr>
<tr>
<td><brandName>Tuborg</brandName></td>
<td><origin>Danmark</origin></td>
<td><details>In small bottles</details></td>
</tr>
</table>
</Beers>
<TABLE summary="This table charts the number of cups of coffe
consumed by each senator, the type of coffee (decaf
or regular), and whether taken with sugar.">
<CAPTION>Cups of coffee consumed by each senator</CAPTION>
<TR>
<TH>Name</TH>
<TH>Cups</TH>
<TH>Type of Coffee</TH>
<TH>Sugar?</TH>
</TR>
<TR>
<TD>T. Sexton</TD>
<TD>10</TD>
<TD>Espresso</TD>
<TD>No</TD>
</TR>
<TR>
<TD>J. Dinnen</TD>
<TD>5</TD>
<TD>Decaf</TD>
<TD>Yes</TD>
</TR>
</TABLE>
......@@ -19,11 +19,11 @@ SET time_zone='+00:00';
CREATE TABLE t1
(
fig INT(4) NOT NULL FIELD_FORMAT='C',
name CHAR(10) not null,
birth DATE NOT NULL,
id CHAR(5) NOT NULL FIELD_FORMAT='S',
name CHAR(10) NOT NULL,
birth DATE NOT NULL FIELD_FORMAT='L',
id CHAR(5) NOT NULL FIELD_FORMAT='L2',
salary DOUBLE(9,2) NOT NULL DEFAULT 0.00 FIELD_FORMAT='F',
dept INT(4) NOT NULL FIELD_FORMAT='S'
dept INT(4) NOT NULL FIELD_FORMAT='L2'
) ENGINE=CONNECT TABLE_TYPE=BIN BLOCK_SIZE=5 FILE_NAME='Testbal.dat';
SELECT * FROM t1;
......@@ -40,11 +40,11 @@ DROP TABLE t1;
CREATE TABLE t1
(
fig INT(4) NOT NULL FIELD_FORMAT='C',
name CHAR(10) not null,
birth DATE NOT NULL,
id CHAR(5) NOT NULL FIELD_FORMAT='S',
name CHAR(10) NOT NULL,
birth DATE NOT NULL FIELD_FORMAT='L',
id CHAR(5) NOT NULL FIELD_FORMAT='L2',
salary DOUBLE(9,2) NOT NULL DEFAULT 0.00 FIELD_FORMAT='F',
dept INT(4) NOT NULL FIELD_FORMAT='S'
dept INT(4) NOT NULL FIELD_FORMAT='L2'
) ENGINE=CONNECT TABLE_TYPE=BIN READONLY=Yes FILE_NAME='Testbal.dat';
--error ER_OPEN_AS_READONLY
INSERT INTO t1 VALUES (7777,'BILL','1973-06-30',4444,5555.555,777);
......
......@@ -97,7 +97,7 @@ SELECT * FROM t1 WHERE ISBN = '9782212090819';
CREATE TABLE t2 (
FIRSTNAME CHAR(32),
LASTNAME CHAR(32))
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='biblio.json' OPTION_LIST='Object=[2]:AUTHOR';
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='biblio.json' OPTION_LIST='Object=[1]:AUTHOR';
SELECT * FROM t2;
INSERT INTO t2 VALUES('Charles','Dickens');
SELECT * FROM t1;
......@@ -162,25 +162,25 @@ DROP TABLE t1;
--echo #
CREATE TABLE t2 (
WHO CHAR(12),
WEEK INT(2) FIELD_FORMAT='WEEK:[1]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[1]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[1]:EXPENSE:[X]:AMOUNT')
WEEK INT(2) FIELD_FORMAT='WEEK:[0]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[0]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[0]:EXPENSE:[X]:AMOUNT')
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='expense.json';
SELECT * FROM t2;
CREATE TABLE t3 (
WHO CHAR(12),
WEEK INT(2) FIELD_FORMAT='WEEK:[2]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[2]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[2]:EXPENSE:[X]:AMOUNT')
WEEK INT(2) FIELD_FORMAT='WEEK:[1]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[1]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[1]:EXPENSE:[X]:AMOUNT')
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='expense.json';
SELECT * FROM t3;
CREATE TABLE t4 (
WHO CHAR(12),
WEEK INT(2) FIELD_FORMAT='WEEK:[3]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[3]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[3]:EXPENSE:[X]:AMOUNT')
WEEK INT(2) FIELD_FORMAT='WEEK:[2]:NUMBER',
WHAT CHAR(32) FIELD_FORMAT='WEEK:[2]:EXPENSE:[X]:WHAT',
AMOUNT DOUBLE(8,2) FIELD_FORMAT='WEEK:[2]:EXPENSE:[X]:AMOUNT')
ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='expense.json';
SELECT * FROM t4;
......
--disable_query_log
#
# Check if server has support for loading plugins
#
if (`SELECT @@have_dynamic_loading != 'YES'`) {
--skip UDF requires dynamic loading
}
let $is_win = `select convert(@@version_compile_os using latin1) IN ("Win32","Win64","Windows")`;
if ($is_win)
{
--eval CREATE FUNCTION Json_Array RETURNS STRING SONAME 'ha_connect.dll';
--eval CREATE FUNCTION Json_Array_Add RETURNS STRING SONAME 'ha_connect.dll';
--eval CREATE FUNCTION Json_Object RETURNS STRING SONAME 'ha_connect.dll';
--eval CREATE FUNCTION Json_Object_Nonull RETURNS STRING SONAME 'ha_connect.dll';
--eval CREATE FUNCTION Json_Value returns STRING SONAME 'ha_connect.dll';
--eval CREATE AGGREGATE FUNCTION Json_Array_Grp RETURNS STRING SONAME 'ha_connect.dll';
--eval CREATE AGGREGATE FUNCTION Json_Object_Grp RETURNS STRING SONAME 'ha_connect.dll';
}
if (!$is_win)
{
--eval CREATE FUNCTION Json_Array RETURNS STRING SONAME 'ha_connect.so';
--eval CREATE FUNCTION Json_Array_Add RETURNS STRING SONAME 'ha_connect.so';
--eval CREATE FUNCTION Json_Object RETURNS STRING SONAME 'ha_connect.so';
--eval CREATE FUNCTION Json_Object_Nonull RETURNS STRING SONAME 'ha_connect.so';
--eval CREATE FUNCTION Json_Value returns STRING SONAME 'ha_connect.so';
--eval CREATE AGGREGATE FUNCTION Json_Array_Grp RETURNS STRING SONAME 'ha_connect.so';
--eval CREATE AGGREGATE FUNCTION Json_Object_Grp RETURNS STRING SONAME 'ha_connect.so';
}
--enable_query_log
--source json_udf.inc
let $MYSQLD_DATADIR= `select @@datadir`;
--copy_file $MTR_SUITE_DIR/std_data/biblio.json $MYSQLD_DATADIR/test/biblio.json
--copy_file $MTR_SUITE_DIR/std_data/employee.dat $MYSQLD_DATADIR/test/employee.dat
--echo #
--echo # Test UDF's with constant arguments
--echo #
SELECT Json_Array();
SELECT Json_Object(56,3.1416,'foo',NULL);
SELECT Json_Object(56 qty,3.1416 price,'foo' truc, NULL garanty);
SELECT Json_Array(56,3.1416,'My name is "Foo"',NULL);
--error ER_CANT_INITIALIZE_UDF
SELECT Json_Array_Add(Json_Array(56,3.1416,'foo',NULL)) Array;
SELECT Json_Array_Add(Json_Array(56,3.1416,'foo',NULL),'One more') Array;
SELECT Json_Array_Add(Json_Value('one value'),'One more');
--error ER_CANT_INITIALIZE_UDF
SELECT Json_Array_Add('one value','One more');
SELECT Json_Array_Add('one value' json_,'One more');
--error ER_CANT_INITIALIZE_UDF
SELECT Json_Value(56,3.1416,'foo',NULL);
SELECT Json_Value(3.1416);
SELECT Json_Value('foo');
SELECT Json_Value(NULL);
SELECT Json_Value();
SELECT Json_Object();
SELECT Json_Object(Json_Array(56,3.1416,'foo'),NULL);
SELECT Json_Array(Json_Array(56,3.1416,'foo'),NULL);
SELECT Json_Array(Json_Object(56 "qty",3.1416 "price",'foo'),NULL);
--echo #
--echo # Test UDF's with column arguments
--echo #
CREATE TABLE t1
(
ISBN CHAR(15),
LANG CHAR(2),
SUBJECT CHAR(32),
AUTHOR CHAR(64),
TITLE CHAR(32),
TRANSLATION CHAR(32),
TRANSLATOR CHAR(80),
PUBLISHER CHAR(32),
DATEPUB int(4)
) ENGINE=CONNECT TABLE_TYPE=JSON FILE_NAME='biblio.json';
SELECT Json_Array(AUTHOR, TITLE, DATEPUB) FROM t1;
SELECT Json_Object(AUTHOR, TITLE, DATEPUB) FROM t1;
--error ER_CANT_INITIALIZE_UDF
SELECT Json_Array_Grp(TITLE, DATEPUB) FROM t1;
SELECT Json_Array_Grp(TITLE) FROM t1;
DROP TABLE t1;
CREATE TABLE t1 (
SERIALNO CHAR(5) NOT NULL,
NAME VARCHAR(12) NOT NULL FLAG=6,
SEX SMALLINT(1) NOT NULL,
TITLE VARCHAR(15) NOT NULL FLAG=20,
MANAGER CHAR(5) DEFAULT NULL,
DEPARTMENT CHAr(4) NOT NULL FLAG=41,
SECRETARY CHAR(5) DEFAULT NULL FLAG=46,
SALARY DOUBLE(8,2) NOT NULL FLAG=52
) ENGINE=CONNECT TABLE_TYPE=FIX BLOCK_SIZE=8 FILE_NAME='employee.dat' ENDING=1;
SELECT Json_Object(SERIALNO, NAME, TITLE, SALARY) FROM t1 WHERE NAME = 'MERCHANT';
SELECT DEPARTMENT, Json_Array_Grp(NAME) FROM t1 GROUP BY DEPARTMENT;
set connect_json_grp_size=30;
SELECT Json_Array(DEPARTMENT, Json_Array_Grp(NAME)) FROM t1 GROUP BY DEPARTMENT;
SELECT Json_Object(DEPARTMENT, Json_Array_Grp(NAME) json_NAMES) FROM t1 GROUP BY DEPARTMENT;
SELECT Json_Object(DEPARTMENT, Json_Array_Grp(Json_Object(SERIALNO, NAME, TITLE, SALARY)) json_EMPLOYES) FROM t1 GROUP BY DEPARTMENT;
SELECT Json_Object(DEPARTMENT, TITLE, Json_Array_Grp(Json_Object(SERIALNO, NAME, SALARY)) json_EMPLOYES) FROM t1 GROUP BY DEPARTMENT, TITLE;
--error ER_CANT_INITIALIZE_UDF
SELECT Json_Object_Grp(SALARY) FROM t1;
SELECT Json_Object_Grp(SALARY, NAME) FROM t1;
SELECT Json_Object(DEPARTMENT, Json_Object_Grp(SALARY, NAME) "Json_SALARIES") FROM t1 GROUP BY DEPARTMENT;
SELECT Json_Array_Grp(NAME) from t1;
DROP TABLE t1;
DROP FUNCTION Json_Array;
DROP FUNCTION Json_Array_Add;
DROP FUNCTION Json_Object;
DROP FUNCTION Json_Object_Nonull;
DROP FUNCTION Json_Value;
DROP FUNCTION Json_Array_Grp;
DROP FUNCTION Json_Object_Grp;
#
# Clean up
#
--remove_file $MYSQLD_DATADIR/test/biblio.json
--remove_file $MYSQLD_DATADIR/test/employee.dat
--source have_libxml2.inc
let $MYSQLD_DATADIR= `select @@datadir`;
SET NAMES utf8;
--copy_file $MTR_SUITE_DIR/std_data/beers.xml $MYSQLD_DATADIR/test/beers.xml
--copy_file $MTR_SUITE_DIR/std_data/coffee.htm $MYSQLD_DATADIR/test/coffee.htm
--echo #
--echo # Testing HTML like XML file
--echo #
CREATE TABLE beers (
`Name` CHAR(16) FIELD_FORMAT='brandName',
`Origin` CHAR(16) FIELD_FORMAT='origin',
`Description` CHAR(32) FIELD_FORMAT='details')
ENGINE=CONNECT TABLE_TYPE=XML FILE_NAME='beers.xml'
TABNAME='table' OPTION_LIST='xmlsup=libxml2,rownode=tr,colnode=td';
SELECT * FROM beers;
DROP TABLE beers;
--echo #
--echo # Testing HTML file
--echo #
CREATE TABLE coffee (
`Name` CHAR(16),
`Cups` INT(8),
`Type` CHAR(16),
`Sugar` CHAR(4))
ENGINE=CONNECT TABLE_TYPE=XML FILE_NAME='coffee.htm'
TABNAME='TABLE' HEADER=1 OPTION_LIST='xmlsup=libxml2,Coltype=HTML';
SELECT * FROM coffee;
DROP TABLE coffee;
#
# Clean up
#
--remove_file $MYSQLD_DATADIR/test/beers.xml
--remove_file $MYSQLD_DATADIR/test/coffee.htm
......@@ -56,6 +56,7 @@ extern handlerton *connect_hton;
/* External function. */
/***********************************************************************/
USETEMP UseTemp(void);
char *GetPluginDir(void);
/* --------------------------- Class RELDEF -------------------------- */
......@@ -322,7 +323,7 @@ int TABDEF::GetColCatInfo(PGLOBAL g)
if ((nof= cdp->Define(g, NULL, pcf, poff)) < 0)
return -1; // Error, probably unhandled type
else if (nof)
else
loff= cdp->GetOffset();
switch (tc) {
......@@ -333,16 +334,24 @@ int TABDEF::GetColCatInfo(PGLOBAL g)
if (nof)
// Field width is the internal representation width
// that can also depend on the column format
switch (cdp->Fmt ? *cdp->Fmt : 'X') {
switch (cdp->Fmt ? *cdp->Fmt : cdp->Decode ? 'C' : 'X') {
case 'X': nof= cdp->Clen;
case 'C': break;
case 'R':
case 'F':
case 'L':
// case 'L':
case 'I': nof= 4; break;
case 'D': nof= 8; break;
case 'S': nof= 2; break;
case 'T': nof= 1; break;
default: nof= cdp->Clen;
default: /* New format */
for (nof= 0, i= 0; cdp->Fmt[i]; i++)
if (isdigit(cdp->Fmt[i]))
nof= (nof * 10 + (cdp->Fmt[i] - '0'));
if (!nof)
nof= cdp->Clen;
} // endswitch Fmt
default:
......@@ -369,20 +378,16 @@ int TABDEF::GetColCatInfo(PGLOBAL g)
// not specified (for instance if quoted is specified)
// if ((ending= Hc->GetIntegerOption("Ending")) < 0) {
if ((ending= Hc->GetIntegerOption("Ending")) <= 0) {
#if defined(WIN32)
ending= 2;
#else
ending= 1;
#endif
ending= (tc == TAB_BIN || tc == TAB_VEC) ? 0 : CRLF;
Hc->SetIntegerOption("Ending", ending);
} // endif ending
// Calculate the default record size
switch (tc) {
case TAB_FIX:
case TAB_BIN:
recln= nlg + ending; // + length of line ending
break;
case TAB_BIN:
case TAB_VEC:
recln= nlg;
......@@ -433,20 +438,31 @@ void TABDEF::SetIndexInfo(void)
PTABDEF OEMDEF::GetXdef(PGLOBAL g)
{
typedef PTABDEF (__stdcall *XGETDEF) (PGLOBAL, void *);
char c, getname[40] = "Get";
char c, soname[_MAX_PATH], getname[40] = "Get";
PTABDEF xdefp;
XGETDEF getdef = NULL;
PCATLG cat = Cat;
/*********************************************************************/
/* Ensure that the .dll doesn't have a path. */
/* This is done to ensure that only approved dll from the system */
/* directories are used (to make this even remotely secure). */
/*********************************************************************/
if (check_valid_path(Module, strlen(Module))) {
strcpy(g->Message, "Module cannot contain a path");
return NULL;
} else
PlugSetPath(soname, Module, GetPluginDir());
#if defined(WIN32)
// Is the DLL already loaded?
if (!Hdll && !(Hdll = GetModuleHandle(Module)))
if (!Hdll && !(Hdll = GetModuleHandle(soname)))
// No, load the Dll implementing the function
if (!(Hdll = LoadLibrary(Module))) {
if (!(Hdll = LoadLibrary(soname))) {
char buf[256];
DWORD rc = GetLastError();
sprintf(g->Message, MSG(DLL_LOAD_ERROR), rc, Module);
sprintf(g->Message, MSG(DLL_LOAD_ERROR), rc, soname);
FormatMessage(FORMAT_MESSAGE_FROM_SYSTEM |
FORMAT_MESSAGE_IGNORE_INSERTS, NULL, rc, 0,
(LPTSTR)buf, sizeof(buf), NULL);
......@@ -471,6 +487,7 @@ PTABDEF OEMDEF::GetXdef(PGLOBAL g)
const char *error = NULL;
Dl_info dl_info;
#if 0 // Don't know what all this stuff does
// The OEM lib must retrieve exported CONNECT variables
if (dladdr(&connect_hton, &dl_info)) {
if (dlopen(dl_info.dli_fname, RTLD_NOLOAD | RTLD_NOW | RTLD_GLOBAL) == 0) {
......@@ -484,13 +501,14 @@ PTABDEF OEMDEF::GetXdef(PGLOBAL g)
sprintf(g->Message, "dladdr failed: %s, OEM not supported", SVP(error));
return NULL;
} // endif dladdr
#endif // 0
// Is the library already loaded?
// if (!Hdll && !(Hdll = ???))
if (!Hdll && !(Hdll = dlopen(soname, RTLD_NOLOAD)))
// Load the desired shared library
if (!(Hdll = dlopen(Module, RTLD_LAZY))) {
if (!(Hdll = dlopen(soname, RTLD_LAZY))) {
error = dlerror();
sprintf(g->Message, MSG(SHARED_LIB_ERR), Module, SVP(error));
sprintf(g->Message, MSG(SHARED_LIB_ERR), soname, SVP(error));
return NULL;
} // endif Hdll
......@@ -745,7 +763,8 @@ int COLDEF::Define(PGLOBAL g, void *, PCOLINFO cfp, int poff)
if (cfp->Datefmt)
Decode = (PSZ)PlugDup(g, cfp->Datefmt);
} // endif special
} else
Offset = poff;
if (cfp->Fieldfmt)
Fmt = (PSZ)PlugDup(g, cfp->Fieldfmt);
......
......@@ -54,6 +54,7 @@
extern int num_read, num_there, num_eq[2]; // Statistics
static const longlong M2G = 0x80000000;
static const longlong M4G = (longlong)2 * M2G;
char BINCOL::Endian = 'H';
/***********************************************************************/
/* External function. */
......@@ -373,18 +374,71 @@ int TDBFIX::WriteDB(PGLOBAL g)
BINCOL::BINCOL(PGLOBAL g, PCOLDEF cdp, PTDB tp, PCOL cp, int i, PSZ am)
: DOSCOL(g, cdp, tp, cp, i, am)
{
Fmt = (cdp->GetFmt()) ? toupper(*cdp->GetFmt()) : 'X';
char *fmt = cdp->GetFmt();
Buff = NULL;
M = GetTypeSize(Buf_Type, sizeof(longlong));
Lim = 0;
if (fmt) {
Fmt = 'H';
for (N = 0, i = 0; fmt[i]; i++)
if (isdigit(fmt[i]))
N = (N * 10 + (fmt[i] - '0'));
else
Fmt = toupper(fmt[i]);
if (N == GetTypeSize(Buf_Type, -1) && (Fmt == 'H' || Fmt == Endian)) {
// New format is a no op
N = 0;
Fmt = 'X';
} else if (Fmt == 'L' || Fmt == 'B' || Fmt == 'H') {
// This is a new format
if (!N)
N = GetTypeSize(Buf_Type, Long);
if (Fmt == 'H')
Fmt = Endian;
Buff = (char*)PlugSubAlloc(g, NULL, M);
memset(Buff, 0, M);
Lim = MY_MIN(N, M);
} // endif Fmt
} else {
N = 0;
Fmt = GetDomain() ? 'C' : 'X';
} // endif fmt
} // end of BINCOL constructor
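The constructor above introduces the new BIN column formats such as 'L2', 'B4' or 'H8': the letter gives the byte order of the field in the file (L little-endian, B big-endian, H host order) and the digits give its width in bytes. A stand-alone sketch of that parsing step (illustrative helper name):

#include <cctype>

// Split a format string such as "L2" into (byte order, width in bytes).
static void parse_bin_fmt(const char *fmt, char &order, int &bytes) {
  order = 'H';                       // default: host byte order
  bytes = 0;
  for (int i = 0; fmt[i]; i++)
    if (isdigit((unsigned char)fmt[i]))
      bytes = bytes * 10 + (fmt[i] - '0');
    else
      order = toupper((unsigned char)fmt[i]);
}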
/***********************************************************************/
/* FIXCOL constructor used for copying columns. */
/* BINCOL constructor used for copying columns. */
/* tdbp is the pointer to the new table descriptor. */
/***********************************************************************/
BINCOL::BINCOL(BINCOL *col1, PTDB tdbp) : DOSCOL(col1, tdbp)
{
Fmt = col1->Fmt;
N = col1->N;
M = col1->M;
Lim = col1->Lim;
} // end of BINCOL copy constructor
/***********************************************************************/
/* Set Endian according to the host setting. */
/***********************************************************************/
void BINCOL::SetEndian(void)
{
union {
short S;
char C[sizeof(short)];
};
S = 1;
Endian = (C[0] == 1) ? 'L' : 'B';
} // end of SetEndian
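The union trick above detects the host byte order at run time. For comparison, C++20 can express the same check at compile time; a minimal sketch (not used by this patch, which targets older compilers):

#include <bit>

// 'L' for little-endian hosts, 'B' for big-endian ones.
constexpr char host_endian =
    (std::endian::native == std::endian::little) ? 'L' : 'B';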
/***********************************************************************/
/* ReadColumn: what this routine does is to access the last line */
/* read from the corresponding table and extract from it the field */
......@@ -416,19 +470,35 @@ void BINCOL::ReadColumn(PGLOBAL g)
/*********************************************************************/
/* Set Value from the line field. */
/*********************************************************************/
switch (Fmt) {
if (N) {
for (int i = 0; i < Lim; i++)
if (Fmt == 'B' && Endian == 'L')
Buff[i] = p[N - i - 1];
else if (Fmt == 'L' && Endian == 'B')
Buff[M - i - 1] = p[i];
else if (Endian == 'B')
Buff[M - i - 1] = p[N - i - 1];
else
Buff[i] = p[i];
if (IsTypeChar(Buf_Type))
Value->SetValue(*(longlong*)Buff);
else
Value->SetBinValue(Buff);
} else switch (Fmt) {
case 'X': // Standard not converted values
Value->SetBinValue(p);
break;
case 'S': // Short integer
Value->SetValue((int)*(short*)p);
Value->SetValue(*(short*)p);
break;
case 'T': // Tiny integer
Value->SetValue((int)*p);
Value->SetValue(*p);
break;
case 'L': // Long Integer
strcpy(g->Message, "Format L is deprecated, use I");
longjmp(g->jumper[g->jump_level], 11);
// case 'L': // Long Integer
// strcpy(g->Message, "Format L is deprecated, use I");
// longjmp(g->jumper[g->jump_level], 11);
case 'I': // Integer
Value->SetValue(*(int*)p);
break;
......@@ -490,14 +560,34 @@ void BINCOL::WriteColumn(PGLOBAL g)
/* Updating will be done only during the second pass (Status=true) */
/* Conversion occurs if the external format Fmt is specified. */
/*********************************************************************/
switch (Fmt) {
if (N) {
if (IsTypeChar(Buf_Type))
*(longlong *)Buff = Value->GetBigintValue();
else if (Value->GetBinValue(Buff, M, Status)) {
sprintf(g->Message, MSG(BIN_F_TOO_LONG),
Name, Value->GetSize(), M);
longjmp(g->jumper[g->jump_level], 31);
} // endif Buff
if (Status)
for (int i = 0; i < Lim; i++)
if (Fmt == 'B' && Endian == 'L')
p[N - i - 1] = Buff[i];
else if (Fmt == 'L' && Endian == 'B')
p[i] = Buff[M - i - 1];
else if (Endian == 'B')
p[N - i - 1] = Buff[M - i - 1];
else
p[i] = Buff[i];
} else switch (Fmt) {
case 'X':
// Standard not converted values
if (Value->GetBinValue(p, Long, Status)) {
sprintf(g->Message, MSG(BIN_F_TOO_LONG),
Name, Value->GetSize(), Long);
longjmp(g->jumper[g->jump_level], 31);
} // endif Fmt
} // endif p
break;
case 'S': // Short integer
......@@ -535,7 +625,7 @@ void BINCOL::WriteColumn(PGLOBAL g)
break;
case 'B': // Large (big) integer
if (Status)
*(longlong *)p = (longlong)Value->GetBigintValue();
*(longlong *)p = Value->GetBigintValue();
break;
case 'F': // Float
......
/*************** TabDos H Declares Source Code File (.H) ***************/
/* Name: TABFIX.H Version 2.3 */
/* Name: TABFIX.H Version 2.4 */
/* */
/* (C) Copyright to the author Olivier BERTRAND 1999-2012 */
/* (C) Copyright to the author Olivier BERTRAND 1999-2015 */
/* */
/* This file contains the TDBFIX and (FIX/BIN)COL classes declares. */
/***********************************************************************/
......@@ -69,16 +69,27 @@ class DllExport BINCOL : public DOSCOL {
// Implementation
virtual int GetAmType(void) {return TYPE_AM_BIN;}
int GetDeplac(void) {return Deplac;}
int GetFileSize(void)
{return N ? N : GetTypeSize(Buf_Type, Long);}
// Methods
virtual void ReadColumn(PGLOBAL g);
virtual void WriteColumn(PGLOBAL g);
// Static
static void SetEndian(void);
protected:
BINCOL(void) {} // Default constructor not to be used
// Members
char Fmt; // The column numeric format
static char Endian; // The host endian setting (L or B)
char *Buff; // Utility buffer
char Fmt; // The file endian setting or old format
int N; // The number of bytes in the file
int M; // The buffer type size
int Lim; // Min(N,M)
}; // end of class BINCOL
/***********************************************************************/
......
......@@ -372,6 +372,7 @@ bool JSONDEF::DefineAM(PGLOBAL g, LPCSTR, int poff)
Pretty = GetIntCatInfo("Pretty", 2);
Level = GetIntCatInfo("Level", 0);
Limit = GetIntCatInfo("Limit", 10);
Base = GetIntCatInfo("Base", 0);
return DOSDEF::DefineAM(g, "DOS", poff);
} // end of DefineAM
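The new Base setting controls whether array subscripts written in JSON column paths are 0-based (the new default) or 1-based (the previous behaviour, as reflected by the test files changing '[1]' to '[0]'). Internally the subscript is simply shifted by the base; a minimal sketch (hypothetical helper):

#include <cstdlib>

// bracket points at the '[' of a path element such as "[1]"; the
// returned value is the 0-based array position used internally.
static int json_subscript(const char *bracket, int base) {
  return atoi(bracket + 1) - base;
}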
......@@ -440,6 +441,7 @@ TDBJSN::TDBJSN(PJDEF tdp, PTXF txfp) : TDBDOS(tdp, txfp)
Xcol = tdp->Xcol;
Limit = tdp->Limit;
Pretty = tdp->Pretty;
B = tdp->Base ? 1 : 0;
Strict = tdp->Strict;
} else {
Jmode = MODE_OBJECT;
......@@ -447,11 +449,12 @@ TDBJSN::TDBJSN(PJDEF tdp, PTXF txfp) : TDBDOS(tdp, txfp)
Xcol = NULL;
Limit = 1;
Pretty = 0;
B = 0;
Strict = false;
} // endif tdp
Fpos = -1;
N = 0;
N = M = 0;
NextSame = 0;
SameRow = 0;
Xval = -1;
......@@ -469,10 +472,12 @@ TDBJSN::TDBJSN(TDBJSN *tdbp) : TDBDOS(NULL, tdbp)
Xcol = tdbp->Xcol;
Fpos = tdbp->Fpos;
N = tdbp->N;
M = tdbp->M;
Limit = tdbp->Limit;
NextSame = tdbp->NextSame;
SameRow = tdbp->SameRow;
Xval = tdbp->Xval;
B = tdbp->B;
Pretty = tdbp->Pretty;
Strict = tdbp->Strict;
Comma = tdbp->Comma;
......@@ -563,7 +568,7 @@ PJSON TDBJSN::FindRow(PGLOBAL g)
jsp->GetObject()->GetValue(objpath) : NULL;
} else if (objpath[strlen(objpath)-1] == ']') {
val = (jsp->GetType() == TYPE_JAR) ?
jsp->GetArray()->GetValue(atoi(objpath+1) - 1) : NULL;
jsp->GetArray()->GetValue(atoi(objpath+1) - B) : NULL;
} else
val = NULL;
......@@ -649,6 +654,7 @@ int TDBJSN::ReadDB(PGLOBAL g)
if (NextSame) {
SameRow = NextSame;
NextSame = 0;
M++;
return RC_OK;
} else if ((rc = TDBDOS::ReadDB(g)) == RC_OK)
if (!IsRead() && ((rc = ReadBuffer(g)) != RC_OK)) {
......@@ -660,6 +666,7 @@ int TDBJSN::ReadDB(PGLOBAL g)
Row = FindRow(g);
SameRow = 0;
Fpos++;
M = 1;
rc = RC_OK;
} // endif's
......@@ -708,7 +715,7 @@ int TDBJSN::MakeTopTree(PGLOBAL g, PJSON jsp)
val->SetValue(arp);
val = new(g) JVALUE;
i = atoi(objpath+1) - 1;
i = atoi(objpath+1) - B;
arp->SetValue(g, val, i);
arp->InitArray(g);
} else {
......@@ -856,8 +863,8 @@ bool JSONCOL::SetArrayOptions(PGLOBAL g, char *p, int i, PSZ nm)
return true;
else if (jnp->Op != OP_EXP) {
if (b) {
// Return 1st value
jnp->Rank = 1;
// Return 1st value (B is the index base)
jnp->Rank = Tjp->B;
jnp->Op = OP_EQ;
} else if (!Value->IsTypeNum()) {
jnp->CncVal = AllocateValue(g, (void*)", ", TYPE_STRING);
......@@ -868,13 +875,9 @@ bool JSONCOL::SetArrayOptions(PGLOBAL g, char *p, int i, PSZ nm)
} // endif OP
} else if (dg) {
if (atoi(p) > 0) {
// Return nth value
jnp->Rank = atoi(p);
jnp->Rank = atoi(p) - Tjp->B;
jnp->Op = OP_EQ;
} else // Ignore array
jnp->Op = OP_NULL;
} else if (n == 1) {
// Set the Op value;
switch (*p) {
......@@ -1118,17 +1121,13 @@ PVAL JSONCOL::GetColumnValue(PGLOBAL g, PJSON row, int i)
arp = (PJAR)row;
if (!Nodes[i].Key) {
if (Nodes[i].Op != OP_NULL) {
if (Nodes[i].Rank) {
val = arp->GetValue(Nodes[i].Rank - 1);
} else if (Nodes[i].Op == OP_EXP) {
if (Nodes[i].Op == OP_EQ)
val = arp->GetValue(Nodes[i].Rank);
else if (Nodes[i].Op == OP_EXP)
return ExpandArray(g, arp, i);
} else
else
return CalculateArray(g, arp, i);
} else
val = NULL;
} else if (i < Nod-1) {
strcpy(g->Message, "Unexpected array");
val = NULL; // Not an expected array
......@@ -1289,17 +1288,13 @@ PJSON JSONCOL::GetRow(PGLOBAL g)
break;
case TYPE_JAR:
if (!Nodes[i].Key) {
if (Nodes[i].Op != OP_NULL) {
arp = (PJAR)row;
if (Nodes[i].Rank)
val = arp->GetValue(Nodes[i].Rank - 1);
if (Nodes[i].Op == OP_EQ)
val = arp->GetValue(Nodes[i].Rank);
else
val = arp->GetValue(Nodes[i].Rx);
} else
val = NULL;
} else {
strcpy(g->Message, "Unexpected array");
val = NULL; // Not an expected array
......@@ -1390,8 +1385,8 @@ void JSONCOL::WriteColumn(PGLOBAL g)
} // endif jsp
if (arp) {
if (Nod > 1 && Nodes[Nod-2].Rank)
arp->SetValue(g, new(g) JVALUE(jsp), Nodes[Nod-2].Rank-1);
if (Nod > 1 && Nodes[Nod-2].Op == OP_EQ)
arp->SetValue(g, new(g) JVALUE(jsp), Nodes[Nod-2].Rank);
else
arp->AddValue(g, new(g) JVALUE(jsp));
......@@ -1411,8 +1406,8 @@ void JSONCOL::WriteColumn(PGLOBAL g)
case TYPE_INT:
case TYPE_DOUBLE:
if (arp) {
if (Nodes[Nod-1].Rank)
arp->SetValue(g, new(g) JVALUE(g, Value), Nodes[Nod-1].Rank-1);
if (Nodes[Nod-1].Op == OP_EQ)
arp->SetValue(g, new(g) JVALUE(g, Value), Nodes[Nod-1].Rank);
else
arp->AddValue(g, new(g) JVALUE(g, Value));
......@@ -1562,7 +1557,7 @@ int TDBJSON::MakeDocument(PGLOBAL g)
arp = jsp->GetArray();
objp = NULL;
i = atoi(objpath+1) - 1;
i = atoi(objpath+1) - B;
val = arp->GetValue(i);
if (!val) {
......@@ -1750,6 +1745,7 @@ int TDBJSON::ReadDB(PGLOBAL)
if (NextSame) {
SameRow = NextSame;
NextSame = false;
M++;
rc = RC_OK;
} else if (++Fpos < (signed)Doc->size()) {
Row = Doc->GetValue(Fpos);
......@@ -1758,6 +1754,7 @@ int TDBJSON::ReadDB(PGLOBAL)
Row = ((PJVAL)Row)->GetJson();
SameRow = 0;
M = 1;
rc = RC_OK;
} else
rc = RC_EF;
......
......@@ -57,6 +57,7 @@ class JSONDEF : public DOSDEF { /* Table description */
int Limit; /* Limit of multiple values */
int Pretty; /* Depends on file structure */
int Level; /* Used for catalog table */
int Base; /* The array index base */
bool Strict; /* Strict syntax checking */
}; // end of JSONDEF
......@@ -84,7 +85,7 @@ class TDBJSN : public TDBDOS {
virtual PCOL MakeCol(PGLOBAL g, PCOLDEF cdp, PCOL cprec, int n);
virtual PCOL InsertSpecialColumn(PCOL colp);
virtual int RowNumber(PGLOBAL g, bool b = FALSE)
{return (b) ? N : Fpos + 1;}
{return (b) ? M : N;}
// Database routines
virtual int Cardinality(PGLOBAL g);
......@@ -106,13 +107,14 @@ class TDBJSN : public TDBDOS {
char *Objname; // The table object name
char *Xcol; // Name of expandable column
int Fpos; // The current row index
//int Spos; // DELETE start index
int N; // The current Rownum
int M; // Index of multiple value
int Limit; // Limit of multiple values
int Pretty; // Depends on file structure
int NextSame; // Same next row
int SameRow; // Same row nb
int Xval; // Index of expandable array
int B; // Array index base
bool Strict; // Strict syntax checking
bool Comma; // Row has final comma
}; // end of class TDBJSN
......