1
0
mirror of https://github.com/sqlite/sqlite.git synced 2025-08-08 14:02:16 +03:00

Merge the latest trunk changes into the wal2 branch.

FossilOrigin-Name: b17f5beab892028fa3e246858f7b34e9104ffb4edd970804e31e5c71bcd5bbc5
This commit is contained in:
drh
2025-05-06 21:38:45 +00:00
38 changed files with 2568 additions and 2854 deletions

View File

@@ -884,7 +884,7 @@ RCC = $(RCC) -DSQLITE_ENABLE_API_ARMOR=1
!ENDIF
!IF $(DEBUG)>2
TCC = $(TCC) -DSQLITE_DEBUG=1
TCC = $(TCC) -DSQLITE_DEBUG=1 -DSQLITE_USE_W32_FOR_CONSOLE_IO
RCC = $(RCC) -DSQLITE_DEBUG=1
!IF $(DYNAMIC_SHELL)==0
TCC = $(TCC) -DSQLITE_ENABLE_WHERETRACE -DSQLITE_ENABLE_SELECTTRACE
@@ -1973,10 +1973,6 @@ fuzzcheck.exe: $(FUZZCHECK_SRC) $(SQLITE3C) $(SQLITE3H)
fuzzcheck-asan.exe: $(FUZZCHECK_SRC) $(SQLITE3C) $(SQLITE3H)
$(LTLINK) $(NO_WARN) /fsanitize=address $(FUZZCHECK_OPTS) $(FUZZCHECK_SRC) $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)
run-fuzzcheck: fuzzcheck.exe fuzzcheck-asan.exe
fuzzcheck --spinner $(FUZZDB)
fuzzcheck-asan --spinner $(FUZZDB)
ossshell.exe: $(OSSSHELL_SRC) $(SQLITE3C) $(SQLITE3H)
$(LTLINK) $(NO_WARN) $(FUZZCHECK_OPTS) $(OSSSHELL_SRC) $(SQLITE3C) /link $(LDFLAGS) $(LTLINKOPTS)

View File

@@ -724,7 +724,7 @@ RCC = $(RCC) -DSQLITE_ENABLE_API_ARMOR=1
!ENDIF
!IF $(DEBUG)>2
TCC = $(TCC) -DSQLITE_DEBUG=1
TCC = $(TCC) -DSQLITE_DEBUG=1 -DSQLITE_USE_W32_FOR_CONSOLE_IO
RCC = $(RCC) -DSQLITE_DEBUG=1
!IF $(DYNAMIC_SHELL)==0
TCC = $(TCC) -DSQLITE_ENABLE_WHERETRACE -DSQLITE_ENABLE_SELECTTRACE

View File

@@ -34,8 +34,8 @@ INSTALL.noexec = $(INSTALL) -m 0644
# any given teaish build.
#
tx.name = @TEAISH_NAME@
tx.pkgName = @TEAISH_PKGNAME@
tx.version = @TEAISH_VERSION@
tx.name.pkg = @TEAISH_PKGNAME@
tx.libdir = @TEAISH_LIBDIR_NAME@
tx.loadPrefix = @TEAISH_LOAD_PREFIX@
tx.tcl = @TEAISH_TCL@
@@ -46,7 +46,13 @@ tx.dll9.basename = @TEAISH_DLL9_BASENAME@
tx.dll8 = @TEAISH_DLL8@
tx.dll9 = @TEAISH_DLL9@
tx.dll = $(tx.dll$(TCL_MAJOR_VERSION))
tx.dir = @TEAISH_DIR@
tx.dir = @TEAISH_EXT_DIR@
@if TEAISH_DIST_NAME
tx.name.dist = @TEAISH_DIST_NAME@
@else
tx.name.dist = $(teaish.name)
@endif
teaish.dir = @abs_top_srcdir@
#teaish.dir.autosetup = @TEAISH_AUTOSETUP_DIR@
@@ -69,6 +75,7 @@ libdir = @libdir@
libexecdir = @libexecdir@
localstatedir = @localstatedir@
mandir = @mandir@
prefix = @prefix@
runstatedir = @runstatedir@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
@@ -81,16 +88,17 @@ sysconfdir = @sysconfdir@
#
TCLSH = @TCLSH_CMD@
TCL_CONFIG_SH = @TCL_CONFIG_SH@
TCL_INCLUDE_SPEC = @TCL_INCLUDE_SPEC@
TCL_LIB_SPEC = @TCL_LIB_SPEC@
TCL_STUB_LIB_SPEC = @TCL_STUB_LIB_SPEC@
TCL_EXEC_PREFIX = @TCL_EXEC_PREFIX@
TCL_VERSION = @TCL_VERSION@
TCL_INCLUDE_SPEC = @TCL_INCLUDE_SPEC@
TCL_LIBS = @TCL_LIBS@
TCL_LIB_SPEC = @TCL_LIB_SPEC@
TCL_MAJOR_VERSION = @TCL_MAJOR_VERSION@
TCL_MINOR_VERSION = @TCL_MINOR_VERSION@
TCL_PATCH_LEVEL = @TCL_PATCH_LEVEL@
TCL_PREFIX = @TCL_PREFIX@
TCL_SHLIB_SUFFIX = @TCL_SHLIB_SUFFIX@
TCL_LIBS = @TCL_LIBS@
TCL_STUB_LIB_SPEC = @TCL_STUB_LIB_SPEC@
TCL_VERSION = @TCL_VERSION@
TCLLIBDIR = @TCLLIBDIR@
#
@@ -107,10 +115,15 @@ CFLAGS.configure = @SH_CFLAGS@ @TEAISH_CFLAGS@ @CFLAGS@ @CPPFLAGS@ $(TCL_INCLUDE
# LDFLAGS.configure = LDFLAGS as known at configure-time.
#
# This ordering is deliberate: flags populated via tcl's
# [teaish-ldflags-add] should preceed LDFLAGS (which typically
# [teaish-ldflags-add] should precede LDFLAGS (which typically
# comes from the ./configure command-line invocation).
#
LDFLAGS.configure = @SH_LDFLAGS@ @TEAISH_LDFLAGS@ @LDFLAGS@ $(TCL_STUB_LIB_SPEC)
LDFLAGS.configure = @TEAISH_LDFLAGS@ @LDFLAGS@
#
# Linker flags for linkhing a shared library.
#
LDFLAGS.shlib = @SH_LDFLAGS@
#
# The following tx.XYZ vars may be populated/modified by teaish.tcl
@@ -119,11 +132,11 @@ LDFLAGS.configure = @SH_LDFLAGS@ @TEAISH_LDFLAGS@ @LDFLAGS@ $(TCL_STUB_LIB_SPEC)
#
# tx.src is the list of source or object files to include in the
# (single) compiler invocation. This will initially contain any
# sources passed to [teaish-src-add], but may also be appended to
# by teaish.make.
# (single) compiler/linker invocation. This will initially contain any
# sources passed to [teaish-src-add], but may also be appended to by
# teaish.make.
#
tx.src =@TEAISH_SRC@
tx.src =@TEAISH_EXT_SRC@
#
# tx.CFLAGS is typically set by teaish.make, whereas TEAISH_CFLAGS
@@ -161,38 +174,66 @@ teaish__autogen.deps = \
$(tx.makefile.in) $(teaish.makefile.in) \
$(tx.tcl) \
@TEAISH_PKGINDEX_TCL_IN@ \
@TEAISH_MODULE_TEST_TCL@ \
@AUTODEPS@
@if TEAISH_MAKEFILE_IN
$(tx.makefile): $(tx.makefile.in)
@endif
teaish.autoreconfig = \
@TEAISH_AUTORECONFIG@
#
# Problem: when more than one target can invoke TEAISH_AUTORECONFIG,
# we can get parallel reconfigures running. Thus, targets which
# may require reconfigure should depend on...
#
config.log: $(teaish__autogen.deps)
@TEAISH_AUTORECONFIG@
$(teaish.autoreconfig)
# ^^^ We would love to skip this when running [dist]clean, but there's
# no POSIX Make-portable way to do that. GNU Make can.
.PHONY: reconfigure
reconfigure:
@TEAISH_AUTORECONFIG@
$(teaish.autoreconfig)
$(teaish.makefile): $(teaish__auto.def) $(teaish.makefile.in) \
@AUTODEPS@
@if TEAISH_TESTER_TCL_IN
@TEAISH_TESTER_TCL_IN@:
@TEAISH_TESTER_TCL@: @TEAISH_TESTER_TCL_IN@
config.log: @TEAISH_TESTER_TCL@
@endif
#
# CC variant for compiling Tcl-using sources.
#
CC.tcl = \
$(CC) -o $@ $(CFLAGS.configure) $(CFLAGS) $(tx.CFLAGS)
#
# CC variant for linking $(tx.src) into an extension DLL. Note that
# $(tx.src) must come before $(LDFLAGS...) for linking to third-party
# libs to work.
#
CC.dll = \
$(CC.tcl) $(tx.src) $(LDFLAGS.shlib) \
$(LDFLAGS.configure) $(LDFLAGS) $(tx.LDFLAGS) $(TCL_STUB_LIB_SPEC)
@if TEAISH_ENABLE_DLL
#
# The rest of this makefile exists solely to support this brief
# target: the extension shared lib.
#
$(tx.dll): $(tx.src) config.log
$(CC) -o $@ $(CFLAGS.configure) $(CFLAGS) $(tx.CFLAGS) \
$(tx.src) $(LDFLAGS.configure) $(LDFLAGS) $(tx.LDFLAGS)
@if [ "x" = "x$(tx.src)" ]; then \
echo "Makefile var tx.src (source/object files) is empty" 1>&2; \
exit 1; \
fi
$(CC.dll)
all: $(tx.dll)
@endif # TEAISH_ENABLE_DLL
tclsh: $(teaish.makefile) config.log
@{ echo "#!/bin/sh"; echo 'exec $(TCLSH) "$$@"'; } > $@
@@ -200,23 +241,29 @@ tclsh: $(teaish.makefile) config.log
@echo "Created $@"
#
# If the extension includes teaish.test.tcl then provide a "test"
# target which which runs that script, passing it (1) the full path to
# extension's DLL (which also provides the script with a way to get
# the test directory) and (2) a script of test utility code intended for
# sourcing by the client.
# Run the generated test script.
#
# If the extension has no test script, add a small one which
# simply loads the DLL and success if it can.
#
#
tx.tester.args = $(tx.dll) $(tx.loadPrefix) @TEAISH_MODULE_TEST_TCL@
.PHONY: test-pre test-core test test-post test-extension
test-extension: # this name is reserved for use by teaish.make
test-prepre: $(tx.dll) @TEAISH_TESTER_TCL@
.PHONY: test-pre test-prepre test-core test test-post test-extension
test-extension: # this name is reserved for use by teaish.make[.in]
@if TEAISH_ENABLE_DLL
test-prepre: $(tx.dll)
@endif
@if TEAISH_TESTER_TCL
test-core.args = @TEAISH_TESTER_TCL@
@if TEAISH_ENABLE_DLL
test-core.args += '$(tx.dll)' '$(tx.loadPrefix)'
@else
test-core.args += '' ''
@endif
test-core.args += @TEAISH_TESTUTIL_TCL@
test-core: test-pre
$(TCLSH) $(test-core.args)
test-prepre: @TEAISH_TESTER_TCL@
@else # !TEAISH_TESTER_TCL
test-prepre:
@endif # TEAISH_TESTER_TCL
test-pre: test-prepre
test-core: test-pre
$(TCLSH) @TEAISH_TESTER_TCL@ $(tx.tester.args)
test-post: test-core
test: test-post
@@ -232,7 +279,6 @@ clean-post: clean-core
clean: clean-post
.PHONY: distclean-pre distclean-core distclean-post clean-extension
distclean-extension: # this name is reserved for use by teaish.make
distclean-pre: clean
distclean-core: distclean-pre
rm -f Makefile
@@ -254,6 +300,7 @@ distclean-core: distclean-pre
@if TEAISH_TEST_TCL_IN
rm -f @TEAISH_TEST_TCL@
@endif
distclean-extension: # this name is reserved for use by teaish.make
distclean-post: distclean-core
distclean: distclean-post
@@ -275,14 +322,16 @@ install-core: install-pre
$(INSTALL.noexec) @TEAISH_PKGINIT_TCL@ "$(DESTDIR)$(TCLLIBDIR)"
@endif
install-test: install-core
@echo 'package require $(tx.pkgName) $(tx.version)' > $@.tcl
@echo "Post-install test of [package require $(tx.pkgName) $(tx.version)]..."
@if $(TCLSH) $@.tcl ; then \
echo "test passed"; \
rm -f $@.tcl; \
@echo "Post-install test of [package require $(tx.name.pkg) $(tx.version)]..."; \
if echo \
'set c 0; ' \
'@TEAISH_POSTINST_PREREQUIRE@' \
'if {[catch {package require $(tx.name.pkg) $(tx.version)}]} {incr c};' \
'exit $$c' \
| $(TCLSH) ; then \
echo "passed"; \
else \
echo "TEST FAILED"; \
rm -f $@.tcl; \
echo "FAILED"; \
exit 1; \
fi
install-post: install-test
@@ -300,35 +349,50 @@ uninstall-post: uninstall-core
@echo "Uninstalled Tcl extension $(tx.name) $(tx.version)"
uninstall: uninstall-post
Makefile: config.log $(teaish.makefile.in)
@if TEAISH_MAKEFILE_IN
config.log: @TEAISH_MAKEFILE_IN@
Makefile: $(tx.makefile.in)
config.log: $(teaish.makefile.in)
@endif
#
# Package archive generation ("dist") rules...
#
@if TEAISH_ENABLE_DIST
@if BIN_TAR
@if BIN_ZIP
# When installing teaish as part of "make dist", we need to run
# configure with similar flags to what we last configured with but we
# must not pass on any extension-specific flags, as those won't be
# recognized when running in --teaish-install mode, causing
# the sub-configure to fail.
dist.flags = --with-tclsh=$(TCLSH)
dist.reconfig = $(teaish.dir)/configure $(dist.flags)
# Temp dir for dist.zip. Must be different than dist.tgz or else
# parallel builds may hose the dist.
teaish__dist.tmp.zip = teaish__dist_zip
#
# Make a distribution zip file...
#
dist.zip.dir = $(tx.name)-$(tx.version)
dist.zip = $(dist.zip.dir).zip
dist.basename = $(tx.name.dist)-$(tx.version)
dist.zip = $(dist.basename).zip
.PHONY: dist.zip dist.zip-core dist.zip-post
#dist.zip-pre:
# We apparently can't add a pre-hook here, even if dist.zip-pre is
# .PHONY, else "make dist" rebuilds the archive each time it's run.
# We apparently can't add a pre-hook here, else "make dist" rebuilds
# the archive each time it's run.
$(dist.zip): $(tx.dist.files)
@rm -fr $(teaish__dist.tmp.zip)
@mkdir -p $(teaish__dist.tmp.zip)/$(dist.zip.dir)
@mkdir -p $(teaish__dist.tmp.zip)/$(dist.basename)
@tar cf $(teaish__dist.tmp.zip)/tmp.tar $(tx.dist.files)
@tar xf $(teaish__dist.tmp.zip)/tmp.tar -C $(teaish__dist.tmp.zip)/$(dist.zip.dir)
@rm -f $(dist.zip.dir)/tmp.tar $(dist.zip)
@cd $(teaish__dist.tmp.zip) && zip -q -r ../$(dist.zip) $(dist.zip.dir)
@tar xf $(teaish__dist.tmp.zip)/tmp.tar -C $(teaish__dist.tmp.zip)/$(dist.basename)
@if TEAISH_DIST_FULL
@$(dist.reconfig) \
--teaish-install=$(teaish__dist.tmp.zip)/$(dist.basename) \
--t-e-d=$(teaish__dist.tmp.zip)/$(dist.basename) >/dev/null
@endif
@rm -f $(dist.basename)/tmp.tar $(dist.zip)
@cd $(teaish__dist.tmp.zip) && zip -q -r ../$(dist.zip) $(dist.basename)
@rm -fr $(teaish__dist.tmp.zip)
@ls -la $(dist.zip)
dist.zip-core: $(dist.zip)
@@ -340,23 +404,27 @@ undist-zip:
undist: undist-zip
@endif #BIN_ZIP
@if BIN_TAR
#
# Make a distribution tarball...
#
teaish__dist.tmp.tgz = teaish__dist_tgz
dist.tgz.dir = $(tx.name)-$(tx.version)
dist.tgz = $(dist.tgz.dir).tar.gz
dist.tgz = $(dist.basename).tar.gz
.PHONY: dist.tgz dist.tgz-core dist.tgz-post
# dist.tgz-pre:
# see notes in dist.zip
$(dist.tgz): $(tx.dist.files)
@rm -fr $(teaish__dist.tmp.tgz)
@mkdir -p $(teaish__dist.tmp.tgz)/$(dist.tgz.dir)
@mkdir -p $(teaish__dist.tmp.tgz)/$(dist.basename)
@tar cf $(teaish__dist.tmp.tgz)/tmp.tar $(tx.dist.files)
@tar xf $(teaish__dist.tmp.tgz)/tmp.tar -C $(teaish__dist.tmp.tgz)/$(dist.tgz.dir)
@rm -f $(dist.tgz.dir)/tmp.tar $(dist.tgz)
@cd $(teaish__dist.tmp.tgz) && tar czf ../$(dist.tgz) $(dist.tgz.dir)
@tar xf $(teaish__dist.tmp.tgz)/tmp.tar -C $(teaish__dist.tmp.tgz)/$(dist.basename)
@if TEAISH_DIST_FULL
@rm -f $(teaish__dist.tmp.tgz)/$(dist.basename)/pkgIndex.tcl.in; # kludge
@$(dist.reconfig) \
--teaish-install=$(teaish__dist.tmp.tgz)/$(dist.basename) \
--t-e-d=$(teaish__dist.tmp.zip)/$(dist.basename) >/dev/null
@endif
@rm -f $(dist.basename)/tmp.tar $(dist.tgz)
@cd $(teaish__dist.tmp.tgz) && tar czf ../$(dist.tgz) $(dist.basename)
@rm -fr $(teaish__dist.tmp.tgz)
@ls -la $(dist.tgz)
dist.tgz-core: $(dist.tgz)
@@ -366,16 +434,34 @@ dist: dist.tgz
undist-tgz:
rm -f $(dist.tgz)
undist: undist-tgz
@else #!BIN_TAR
dist:
@echo "The dist rules require tar, which configure did not find." 1>&2; exit 1
@endif #BIN_TAR
@else
@else #!TEAISH_ENABLE_DIST
undist:
dist:
@if TEAISH_OUT_OF_EXT_TREE
@echo "'dist' can only be used from an extension's home dir" 1>&2; \
echo "In this case: @TEAISH_DIR@" 1>&2; exit 1
echo "In this case: @TEAISH_EXT_DIR@" 1>&2; exit 1
@endif
@endif #TEAISH_ENABLE_DIST
Makefile: @TEAISH_TCL@
@if TEAISH_MAKEFILE_CODE
#
# TEAISH_MAKEFILE_CODE may contain literal makefile code, which
# gets pasted verbatim here. Either [define TEAISH_MAKEFILE_CODE
# ...] or use [teaish-make-add] to incrementally build up this
# content.
#
# <TEAISH_MAKEFILE_CODE>
@TEAISH_MAKEFILE_CODE@
# </TEAISH_MAKEFILE_CODE>
@endif
@if TEAISH_MAKEFILE
#
# TEAISH_MAKEFILE[_IN] defines any extension-specific state this file
# needs.
@@ -386,7 +472,7 @@ dist:
# - tx.src = list of the extension's source files, being sure to
# prefix each with $(tx.dir) (if it's in the same dir as the
# extension) so that out-of-tree builds can find them. Optionally,
# [define] TEAISH_SRC or pass them to [teaish-src-add].
# [define] TEAISH_EXT_SRC or pass them to [teaish-src-add].
#
# It may optionally set the following vars:
#
@@ -399,31 +485,12 @@ dist:
# It may optionally hook into various targets as documented in
# /doc/extensions.md in the canonical teaish source tree.
#
# Interestingly, we don't have to pre-filter teaish.makefile.in -
# we can just import it into here. That skips its teaish-specific
# validation though. Hmm.
# Interestingly, we don't have to pre-filter teaish.makefile.in - we
# can just @include it here. That skips its teaish-specific validation
# though. Hmm.
#
#@if TEAISH_MAKEFILE_IN
## TEAISH_MAKEFILE_IN ==>
#Makefile: @TEAISH_MAKEFILE_IN@
#@include @TEAISH_MAKEFILE_IN@
#@endif
#@if !TEAISH_MAKEFILE_IN
@if TEAISH_MAKEFILE
# TEAISH_MAKEFILE ==>
# <TEAISH_MAKEFILE>
Makefile: @TEAISH_MAKEFILE@
@include @TEAISH_MAKEFILE@
@endif #TEAISH_MAKEFILE
#@endif #!TEAISH_MAKEFILE_IN
#
# TEAISH_MAKEFILE_CODE may contain literal makefile code, which
# gets pasted verbatim here. Either [define TEAISH_MAKEFILE_CODE
# ...] or use [teaish-make-add] to incrementally build up this
# content.
#
@if TEAISH_MAKEFILE_CODE
# TEAISH_MAKEFILE_CODE ==>
Makefile: @TEAISH_TCL@
@TEAISH_MAKEFILE_CODE@
@endif #TEAISH_MAKEFILE_CODE
# </TEAISH_MAKEFILE>
@endif

View File

@@ -1,7 +1,8 @@
#/do/not/tclsh
# ^^^ help out editors which guess this file's content type.
#
# Main configure script entry point for the "TEA-via-autosetup"
# framework.
# Main configure script entry point for the TEA(ish) framework. All
# extension-specific customization goes in teaish.tcl.in or
# teaish.tcl.
use teaish/core
teaish-configure-core

File diff suppressed because it is too large Load Diff

View File

@@ -186,7 +186,7 @@ proc teaish-import-features {args} {
#
if {$flags(-run) && "" ne $pk} {
proj-error "Cannot use both -run and $pk" \
" (called from [proj-current-scope 1])"
" (called from [proj-scope 1])"
}
foreach arg $args {

View File

@@ -29,6 +29,7 @@
# call). If $lvl would resolve to global scope "global scope" is
# returned and if it would be negative then a string indicating such
# is returned (as opposed to throwing an error).
#
proc test-current-scope {{lvl 0}} {
#uplevel [expr {$lvl + 1}] {lindex [info level 0] 0}
set ilvl [info level]
@@ -45,6 +46,7 @@ proc test-current-scope {{lvl 0}} {
# @test-msg
#
# Emits all arugments to stdout.
#
proc test-msg {args} {
puts "$args"
}
@@ -113,12 +115,15 @@ proc test-assert {testId script {msg ""}} {
# @test-expect testId script result
#
# Runs $script in the calling scope and compares its result to
# $result. If they differ, it triggers an [assert].
# $result, minus any leading or trailing whitespace. If they differ,
# it triggers an [assert].
#
proc test-expect {testId script result} {
puts "test $testId"
set x [uplevel 1 $script]
test__assert 1 {$x eq $result} "\nEXPECTED: <<$result>>\nGOT: <<$x>>"
set x [string trim [uplevel 1 $script]]
set result [string trim $result]
test__assert 1 {$x eq $result} \
"\nEXPECTED: <<$result>>\nGOT: <<$x>>"
}
#
@@ -142,7 +147,11 @@ if {![array exists ::teaish__BuildFlags]} {
}
#
# @teaish-build-flag2 flag tgtVar ?dflt?
# @teaish-build-flag3 flag tgtVar ?dflt?
#
# If the current build has the configure-time flag named $flag set
# then tgtVar is assigned its value and 1 is returned, else tgtVal is
# assigned $dflt and 0 is returned.
#
# Caveat #1: only valid when called in the context of teaish's default
# "make test" recipe, e.g. from teaish.test.tcl. It is not valid from
@@ -152,15 +161,11 @@ if {![array exists ::teaish__BuildFlags]} {
# an external script have populated its internal state, which is
# normally handled via teaish.tester.tcl.in.
#
# If the current build has the configure-time flag named $flag set
# then tgtVar is assigned its value and 1 is returned, else tgtVal is
# assigned $dflt and 0 is returned.
#
# Caveat #2: defines in the style of HAVE_FEATURENAME with a value of
# 0 are, by long-standing configure script conventions, treated as
# _undefined_ here.
#
proc teaish-build-flag2 {flag tgtVar {dflt ""}} {
proc teaish-build-flag3 {flag tgtVar {dflt ""}} {
upvar $tgtVar tgt
if {[info exists ::teaish__BuildFlags($flag)]} {
set tgt $::teaish__BuildFlags($flag)
@@ -177,12 +182,12 @@ proc teaish-build-flag2 {flag tgtVar {dflt ""}} {
#
# @teaish-build-flag flag ?dflt?
#
# Convenience form of teaish-build-flag2 which returns the
# Convenience form of teaish-build-flag3 which returns the
# configure-time-defined value of $flag or "" if it's not defined (or
# if it's an empty string).
#
proc teaish-build-flag {flag {dflt ""}} {
set tgt ""
teaish-build-flag2 $flag tgt $dflt
teaish-build-flag3 $flag tgt $dflt
return $tgt
}

View File

@@ -6,14 +6,14 @@
# edit it.
#
# Adapted from https://core.tcl-lang.org/tcltls
@if TEAISH_VSATISFIES_TCL
if {![package vsatisfies [package provide Tcl] @TEAISH_VSATISFIES_TCL@]} {
error "Package @TEAISH_PKGNAME@ @TEAISH_VERSION@ requires Tcl @TEAISH_VSATISFIES_TCL@"
}
@if TEAISH_VSATISFIES_CODE
@TEAISH_VSATISFIES_CODE@
@endif
if {[package vsatisfies [package provide Tcl] 9.0-]} {
package ifneeded {@TEAISH_PKGNAME@} {@TEAISH_VERSION@} [list apply {{dir} {
@if TEAISH_ENABLE_DLL
load [file join $dir {@TEAISH_DLL9@}] @TEAISH_LOAD_PREFIX@
@endif
@if TEAISH_PKGINIT_TCL_TAIL
set initScript [file join $dir {@TEAISH_PKGINIT_TCL_TAIL@}]
if {[file exists $initScript]} {
@@ -23,11 +23,13 @@ if {[package vsatisfies [package provide Tcl] 9.0-]} {
}} $dir]
} else {
package ifneeded {@TEAISH_PKGNAME@} {@TEAISH_VERSION@} [list apply {{dir} {
@if TEAISH_ENABLE_DLL
if {[string tolower [file extension {@TEAISH_DLL8@}]] in [list .dll .dylib .so]} {
load [file join $dir {@TEAISH_DLL8@}] @TEAISH_LOAD_PREFIX@
} else {
load {} @TEAISH_LOAD_PREFIX@
}
@endif
@if TEAISH_PKGINIT_TCL_TAIL
set initScript [file join $dir {@TEAISH_PKGINIT_TCL_TAIL@}]
if {[file exists $initScript]} {

View File

@@ -1,35 +1,83 @@
# Teaish configure script for the SQLite TCL extension
apply {{} {
set version [proj-file-content -trim [get-define TEAISH_DIR]/../VERSION]
proj-assert {[string match 3.*.* $version]}
teaish-pkginfo-set \
-name sqlite \
-pkgName sqlite3 \
-version $version \
-loadPrefix Sqlite3 \
-vsatisfies 8.6- \
-libDir sqlite$version
}}
# Teaish configure script for the SQLite Tcl extension
#
# Object for communicating certain config-time state across various
# auto.def-related pieces.
# State for disparate config-time pieces.
#
array set sqliteConfig [subst [proj-strip-hash-comments {
array set sqlite__Config [proj-strip-hash-comments {
#
# The list of feature --flags which the --all flag implies. This
# requires special handling in a few places.
#
all-flag-enables {fts3 fts4 fts5 rtree geopoly}
}]]
# >0 if building in the canonical tree. -1=undetermined
is-canonical -1
}]
#
# Set up the package info for teaish...
#
apply {{dir} {
# Figure out the version number...
set version ""
if {[file exists $dir/../VERSION]} {
# The canonical SQLite TEA(ish) build
set version [proj-file-content -trim $dir/../VERSION]
set ::sqlite__Config(is-canonical) 1
set distname sqlite-tcl
} elseif {[file exists $dir/generic/tclsqlite3.c]} {
# The copy from the teaish tree, used as a dev/test bed before
# updating SQLite's tree.
set ::sqlite__Config(is-canonical) 0
set fd [open $dir/generic/tclsqlite3.c rb]
while {[gets $fd line] >=0} {
if {[regexp {^#define[ ]+SQLITE_VERSION[ ]+"(3.+)"} \
$line - version]} {
set distname sqlite-teaish
break
}
}
close $fd
}
if {"" eq $version} {
proj-fatal "Cannot determine the SQLite version number"
}
proj-assert {$::sqlite__Config(is-canonical) > -1}
proj-assert {[string match 3.*.* $version]} \
"Unexpected SQLite version: $version"
set pragmas {}
if {$::sqlite__Config(is-canonical)} {
# Disable "make dist" in the canonical tree. That tree is
# generated from several pieces and creating/testing working
# "dist" rules for that sub-build currently feels unnecessary. The
# copy in the teaish tree, though, should be able to "make dist".
lappend pragmas no-dist
} else {
lappend pragmas full-dist
}
teaish-pkginfo-set -vars {
-name sqlite
-name.pkg sqlite3
-version $version
-name.dist $distname
-vsatisfies 8.6-
-libDir sqlite$version
-pragmas $pragmas
}
}} [teaish-get -dir]
#
# Must return either an empty string or a list in the form accepted by
# autosetup's [options] function.
#
proc teaish-options {} {
return [proj-strip-hash-comments [subst -nocommands -nobackslashes {
# These flags and defaults mostly derive from the historical TEA
# build. Some, like ICU, are taken from the canonical SQLite tree.
return [subst -nocommands -nobackslashes {
with-system-sqlite=0
=> {Use the system-level SQLite instead of the copy in this tree.
Also requires use of --override-sqlite-version so that the build
@@ -48,7 +96,7 @@ proc teaish-options {} {
geopoly => {Enable the GEOPOLY extension}
rtree => {Enable the RTREE extension}
session => {Enable the SESSION extension}
all=1 => {Disable $::sqliteConfig(all-flag-enables)}
all=1 => {Disable $::sqlite__Config(all-flag-enables)}
with-icu-ldflags:LDFLAGS
=> {Enable SQLITE_ENABLE_ICU and add the given linker flags for the
ICU libraries. e.g. on Ubuntu systems, try '-licui18n -licuuc -licudata'.}
@@ -61,7 +109,7 @@ proc teaish-options {} {
icu-collations=0
=> {Enable SQLITE_ENABLE_ICU_COLLATIONS. Requires --with-icu-ldflags=...
or --with-icu-config}
}]]
}]
}
#
@@ -69,12 +117,10 @@ proc teaish-options {} {
# work needed for this extension.
#
proc teaish-configure {} {
teaish-enable-dist 0
use teaish/feature-tests
set srcdir [get-define TEAISH_DIR]
teaish-src-add -dist -dir generic/tclsqlite3.c
teaish-cflags-add -I${srcdir}/..
if {[proj-opt-was-provided override-sqlite-version]} {
teaish-pkginfo-set -version [opt-val override-sqlite-version]
proj-warn "overriding sqlite version number:" [teaish-pkginfo-get -version]
@@ -103,6 +149,8 @@ proc teaish-configure {} {
msg-result "Using system-level sqlite3."
teaish-cflags-add -DUSE_SYSTEM_SQLITE
teaish-ldflags-add -lsqlite3
} elseif {$::sqlite__Config(is-canonical)} {
teaish-cflags-add -I[teaish-get -dir]/..
}
teaish-check-librt
@@ -116,9 +164,8 @@ proc teaish-configure {} {
sqlite-handle-common-feature-flags; # must be late in the process
}; # teaish-configure
define OPT_FEATURE_FLAGS {} ; # -DSQLITE_OMIT/ENABLE flags.
########################################################################
#
# Adds $args, if not empty, to OPT_FEATURE_FLAGS. This is intended only for holding
# -DSQLITE_ENABLE/OMIT/... flags, but that is not enforced here.
proc sqlite-add-feature-flag {args} {
@@ -127,7 +174,7 @@ proc sqlite-add-feature-flag {args} {
}
}
########################################################################
#
# Check for log(3) in libm and die with an error if it is not
# found. $featureName should be the feature name which requires that
# function (it's used only in error messages). defines LDFLAGS_MATH to
@@ -148,13 +195,13 @@ proc sqlite-affirm-have-math {featureName} {
}
}
########################################################################
#
# Handle various SQLITE_ENABLE/OMIT_... feature flags.
proc sqlite-handle-common-feature-flags {} {
msg-result "Feature flags..."
if {![opt-bool all]} {
# Special handling for --disable-all
foreach flag $::sqliteConfig(all-flag-enables) {
foreach flag $::sqlite__Config(all-flag-enables) {
if {![proj-opt-was-provided $flag]} {
proj-opt-set $flag 0
}
@@ -175,7 +222,7 @@ proc sqlite-handle-common-feature-flags {} {
# The --geopoly flag, though, will automatically re-enable
# --rtree, so --disable-rtree won't actually disable anything in
# that case.
foreach k $::sqliteConfig(all-flag-enables) {
foreach k $::sqlite__Config(all-flag-enables) {
if {![proj-opt-was-provided $k]} {
proj-opt-set $k 1
}
@@ -206,7 +253,7 @@ proc sqlite-handle-common-feature-flags {} {
}
}
}
########################################################################
#
# Invert the above loop's logic for some SQLITE_OMIT_... cases. If
# config option $boolFlag is false, [sqlite-add-feature-flag
# $featureFlag], where $featureFlag is intended to be
@@ -222,7 +269,7 @@ proc sqlite-handle-common-feature-flags {} {
}
}
#########################################################################
##
# Remove duplicates from the final feature flag sets and show them
# to the user.
set oFF [get-define OPT_FEATURE_FLAGS]
@@ -236,7 +283,7 @@ proc sqlite-handle-common-feature-flags {} {
teaish-cflags-add -define OPT_FEATURE_FLAGS
}; # sqlite-handle-common-feature-flags
########################################################################
#
# If --enable-threadsafe is set, this adds -DSQLITE_THREADSAFE=1 to
# OPT_FEATURE_FLAGS and sets LDFLAGS_PTHREAD to the linker flags
# needed for linking pthread (possibly an empty string). If
@@ -258,10 +305,8 @@ proc sqlite-handle-threadsafe {} {
teaish-ldflags-prepend $ldf
undefine lib_pthread_create
undefine lib_pthread_mutexattr_init
} elseif {[proj-opt-was-provided threadsafe]} {
user-error "Missing required pthread libraries. Use --disable-threadsafe to disable this check."
} else {
msg-result "pthread support not detected"
user-error "Missing required pthread libraries. Use --disable-threadsafe to disable this check."
}
# Recall that LDFLAGS_PTHREAD might be empty even if pthreads if
# found because it's in -lc on some platforms.
@@ -271,30 +316,31 @@ proc sqlite-handle-threadsafe {} {
} else {
#
# If user does not specify --[disable-]threadsafe then select a
# default based on whether it looks like TCL has threading
# default based on whether it looks like Tcl has threading
# support.
#
#puts "TCL_LIBS = [get-define TCL_LIBS]"
if {[string match *pthread* [get-define TCL_LIBS]]} {
# ^^^ FIXME: there must be a better way of testing this
catch {
scan [exec echo {puts [tcl::pkgconfig get threaded]} | [get-define TCLSH_CMD]] \
%d enable
}
if {$enable} {
set flagName "--threadsafe"
set lblAbled "enabled"
set enable 1
msg-result "yes"
msg-result yes
} else {
set flagName "--disable-threadsafe"
set lblAbled "disabled"
set enable 0
msg-result "no"
msg-result no
}
msg-result "NOTICE: defaulting to ${flagName} because TCL has threading ${lblAbled}."
# ^^^ We don't need to link against -lpthread in the is-enabled case.
msg-result "Defaulting to ${flagName} because Tcl has threading ${lblAbled}."
# ^^^ We (probably) don't need to link against -lpthread in the
# is-enabled case. We might in the case of static linking. Unsure.
}
sqlite-add-feature-flag -DSQLITE_THREADSAFE=${enable}
return $enable
}
########################################################################
#
# Handles the --enable-load-extension flag. Returns 1 if the support
# is enabled, else 0. If support for that feature is not found, a
# fatal error is triggered if --enable-load-extension is explicitly
@@ -346,7 +392,7 @@ proc sqlite-handle-load-extension {} {
return $found
}
########################################################################
#
# ICU - International Components for Unicode
#
# Handles these flags:
@@ -446,7 +492,7 @@ proc sqlite-handle-icu {} {
}; # sqlite-handle-icu
########################################################################
#
# Handles the --with-tempstore flag.
#
# The test fixture likes to set SQLITE_TEMP_STORE on its own, so do
@@ -471,7 +517,7 @@ proc sqlite-handle-tempstore {} {
}
}
########################################################################
#
# Handles the --enable-math flag.
proc sqlite-handle-math {} {
proj-if-opt-truthy math {
@@ -490,18 +536,13 @@ proc sqlite-handle-math {} {
}
}
########################################################################
#
# Move -DSQLITE_OMIT... and -DSQLITE_ENABLE... flags from CFLAGS and
# CPPFLAGS to OPT_FEATURE_FLAGS and remove them from BUILD_CFLAGS.
proc sqlite-munge-cflags {} {
# Move CFLAGS and CPPFLAGS entries matching -DSQLITE_OMIT* and
# -DSQLITE_ENABLE* to OPT_FEATURE_FLAGS. This behavior is derived
# from the legacy build and was missing the 3.48.0 release (the
# initial Autosetup port).
# https://sqlite.org/forum/forumpost/9801e54665afd728
#
# Handling of CPPFLAGS, as well as removing ENABLE/OMIT from
# CFLAGS/CPPFLAGS, was missing in the 3.49.0 release as well.
# from the pre-3.48 build.
#
# If any configure flags for features are in conflict with
# CFLAGS/CPPFLAGS-specified feature flags, all bets are off. There

View File

@@ -5,17 +5,27 @@
# probably not edit it.
#
# This is the wrapper script invoked by teaish's "make test" recipe.
@if TEAISH_VSATISFIES_TCL
if {![package vsatisfies [package provide Tcl] @TEAISH_VSATISFIES_TCL@]} {
error "Package @TEAISH_PKGNAME@ @TEAISH_VERSION@ requires Tcl @TEAISH_VSATISFIES_TCL@"
}
# It gets passed 3 args:
#
# $1 = the DLL name, or "" if the extension has no DLL
#
# $2 = the "load prefix" for Tcl's [load] or empty if $1 is empty
#
# $3 = the /path/to/teaish/tester.tcl (test utility code)
#
@if TEAISH_VSATISFIES_CODE
@TEAISH_VSATISFIES_CODE@
@endif
load [lindex $::argv 0] [lindex $::argv 1];
source [lindex $::argv 2]; # teaish/tester.tcl
if {[llength [lindex $::argv 0]] > 0} {
load [file normalize [lindex $::argv 0]] [lindex $::argv 1];
# ----^^^^^^^ needed on Haiku when argv 0 is just a filename, else
# load cannot find the file.
}
source -encoding utf-8 [lindex $::argv 2]; # teaish/tester.tcl
@if TEAISH_PKGINIT_TCL
apply {{file} {
set dir [file dirname $::argv0]
source $file
source -encoding utf-8 $file
}} [join {@TEAISH_PKGINIT_TCL@}]
@endif
@if TEAISH_TEST_TCL
@@ -24,7 +34,7 @@ apply {{file} {
array set ::teaish__BuildFlags @TEAISH__DEFINES_MAP@
set dir [file normalize [file dirname $file]]
#test-fail "Just testing"
source $file
source -encoding utf-8 $file
}} [join {@TEAISH_TEST_TCL@}]
@else # TEAISH_TEST_TCL
# No $TEAISH_TEST_TCL provided, so here's a default test which simply

View File

@@ -9132,7 +9132,7 @@ int Jim_StringEqObj(Jim_Obj *aObjPtr, Jim_Obj *bObjPtr)
const char *sA = Jim_GetString(aObjPtr, &Alen);
const char *sB = Jim_GetString(bObjPtr, &Blen);
return Alen == Blen && memcmp(sA, sB, Alen) == 0;
return Alen == Blen && *sA == *sB && memcmp(sA, sB, Alen) == 0;
}
}
@@ -10242,7 +10242,7 @@ static int JimCommandsHT_KeyCompare(void *privdata, const void *key1, const void
int len1, len2;
const char *str1 = Jim_GetStringNoQualifier((Jim_Obj *)key1, &len1);
const char *str2 = Jim_GetStringNoQualifier((Jim_Obj *)key2, &len2);
return len1 == len2 && memcmp(str1, str2, len1) == 0;
return len1 == len2 && *str1 == *str2 && memcmp(str1, str2, len1) == 0;
}
static void JimCommandsHT_ValDestructor(void *interp, void *val)
@@ -13864,6 +13864,13 @@ static int JimExprOpNumUnary(Jim_Interp *interp, struct JimExprNode *node)
case JIM_EXPROP_NOT:
wC = !bA;
break;
case JIM_EXPROP_UNARYPLUS:
case JIM_EXPROP_UNARYMINUS:
rc = JIM_ERR;
Jim_SetResultFormatted(interp,
"can't use non-numeric string as operand of \"%s\"",
node->type == JIM_EXPROP_UNARYPLUS ? "+" : "-");
break;
default:
abort();
}
@@ -19868,16 +19875,22 @@ wrongargs:
}
else if (errorCodeObj) {
int len = Jim_ListLength(interp, argv[idx + 1]);
int i;
ret = JIM_OK;
if (len > Jim_ListLength(interp, errorCodeObj)) {
for (i = 0; i < len; i++) {
Jim_Obj *matchObj = Jim_ListGetIndex(interp, argv[idx + 1], i);
Jim_Obj *objPtr = Jim_ListGetIndex(interp, errorCodeObj, i);
if (Jim_StringCompareObj(interp, matchObj, objPtr, 0) != 0) {
ret = -1;
break;
ret = -1;
}
else {
int i;
ret = JIM_OK;
for (i = 0; i < len; i++) {
Jim_Obj *matchObj = Jim_ListGetIndex(interp, argv[idx + 1], i);
Jim_Obj *objPtr = Jim_ListGetIndex(interp, errorCodeObj, i);
if (Jim_StringCompareObj(interp, matchObj, objPtr, 0) != 0) {
ret = -1;
break;
}
}
}
}
@@ -20253,7 +20266,7 @@ static int Jim_DictCoreCommand(Jim_Interp *interp, int argc, Jim_Obj *const *arg
}
case OPT_SET:
return Jim_SetDictKeysVector(interp, argv[2], argv + 3, argc - 4, argv[argc - 1], JIM_ERRMSG);
return Jim_SetDictKeysVector(interp, argv[2], argv + 3, argc - 4, argv[argc - 1], JIM_ERRMSG | JIM_UNSHARED);
case OPT_EXISTS:{
int rc = Jim_DictKeysVector(interp, argv[2], argv + 3, argc - 3, &objPtr, JIM_NONE);
@@ -20265,7 +20278,7 @@ static int Jim_DictCoreCommand(Jim_Interp *interp, int argc, Jim_Obj *const *arg
}
case OPT_UNSET:
if (Jim_SetDictKeysVector(interp, argv[2], argv + 3, argc - 3, NULL, JIM_NONE) != JIM_OK) {
if (Jim_SetDictKeysVector(interp, argv[2], argv + 3, argc - 3, NULL, JIM_UNSHARED) != JIM_OK) {
return JIM_ERR;
}
return JIM_OK;

View File

@@ -8,7 +8,13 @@
# * May you find forgiveness for yourself and forgive others.
# * May you share freely, never taking more than you give.
#
########################################################################
#
# ----- @module proj.tcl -----
# @section Project-agnostic Helper APIs
#
#
# Routines for Steve Bennett's autosetup which are common to trees
# managed in and around the umbrella of the SQLite project.
#
@@ -25,13 +31,12 @@
# noted here only as an indication that there are no licensing issues
# despite this code having a handful of near-twins running around a
# handful of third-party source trees.
########################################################################
#
# Design notes:
#
# - Symbols with a suffix of _ are intended for internal use within
# - Symbols with _ separators are intended for internal use within
# this file, and are not part of the API which auto.def files should
# rely on.
# rely on. Symbols with - separators are public APIs.
#
# - By and large, autosetup prefers to update global state with the
# results of feature checks, e.g. whether the compiler supports flag
@@ -49,10 +54,7 @@
# test, downstream tests may not like the $prefix/lib path added by
# the rpath test. To avoid such problems, we avoid (intentionally)
# updating global state via feature tests.
########################################################################
# ----- @module proj.tcl -----
# @section Project Helper APIs
#
#
# $proj__Config is an internal-use-only array for storing whatever generic
@@ -83,7 +85,7 @@ set proj__Config(isatty) [isatty? stdout]
#
proc proj-warn {args} {
show-notices
puts stderr [join [list "WARNING:" {*}$args] " "]
puts stderr [join [list "WARNING: \[[proj-scope 1]\]: " {*}$args] " "]
}
#
@@ -103,7 +105,7 @@ proc proj-fatal {args} {
set args [lassign $args -]
incr lvl
}
puts stderr [join [list "FATAL: \[[proj-current-scope $lvl]]: " {*}$args]]
puts stderr [join [list "FATAL: \[[proj-scope $lvl]]: " {*}$args]]
exit 1
}
@@ -119,27 +121,26 @@ proc proj-error {args} {
set args [lassign $args -]
incr lvl
}
error [join [list "\[[proj-current-scope $lvl]]:" {*}$args]]
error [join [list "\[[proj-scope $lvl]]:" {*}$args]]
}
#
# @proj-assert script ?message?
#
# Kind of like a C assert: if uplevel (eval) of [expr {$script}] is
# false, a fatal error is triggered. The error message, by default,
# includes the body of the failed assertion, but if $msg is set then
# that is used instead.
# Kind of like a C assert: if uplevel of [list expr $script] is false,
# a fatal error is triggered. The error message, by default, includes
# the body of the failed assertion, but if $msg is set then that is
# used instead.
#
proc proj-assert {script {msg ""}} {
if {1 == [get-env proj-assert 0]} {
msg-result [proj-bold "asserting: $script"]
}
set x "expr \{ $script \}"
if {![uplevel 1 $x]} {
if {![uplevel 1 [list expr $script]]} {
if {"" eq $msg} {
set msg $script
}
proj-fatal "Assertion failed: $msg"
proj-fatal "Assertion failed in \[[proj-scope 1]\]: $msg"
}
}
@@ -212,7 +213,7 @@ proc proj-indented-notice {args} {
# Returns 1 if cross-compiling, else 0.
#
proc proj-is-cross-compiling {} {
return [expr {[get-define host] ne [get-define build]}]
expr {[get-define host] ne [get-define build]}
}
#
@@ -249,7 +250,7 @@ proc proj-cflags-without-werror {{var CFLAGS}} {
default { lappend rv $f }
}
}
return [join $rv " "]
join $rv " "
}
#
@@ -297,7 +298,7 @@ proc proj-search-for-header-dir {header args} {
-dirs { set args [lassign $args - dirs] }
-subdirs { set args [lassign $args - subdirs] }
default {
proj-fatal "Unhandled argument: $args"
proj-error "Unhandled argument: $args"
}
}
}
@@ -375,7 +376,8 @@ proc proj-first-bin-of {args} {
set rc ""
foreach b $args {
set u [string toupper $b]
# Note that cc-path-progs defines $u to false if it finds no match.
# Note that cc-path-progs defines $u to "false" if it finds no
# match.
if {[cc-path-progs $b]} {
set rc [get-define $u]
}
@@ -519,7 +521,7 @@ proc proj-define-for-opt {flag def {msg ""} {iftrue 1} {iffalse 0}} {
# @proj-opt-define-bool ?-v? optName defName ?descr?
#
# Checks [proj-opt-truthy $optName] and calls [define $defName X]
# where X is 0 for false and 1 for true. descr is an optional
# where X is 0 for false and 1 for true. $descr is an optional
# [msg-checking] argument which defaults to $defName. Returns X.
#
# If args[0] is -v then the boolean semantics are inverted: if
@@ -529,7 +531,7 @@ proc proj-define-for-opt {flag def {msg ""} {iftrue 1} {iffalse 0}} {
proc proj-opt-define-bool {args} {
set invert 0
if {[lindex $args 0] eq "-v"} {
set invert 1
incr invert
lassign $args - optName defName descr
} else {
lassign $args optName defName descr
@@ -540,14 +542,9 @@ proc proj-opt-define-bool {args} {
#puts "optName=$optName defName=$defName descr=$descr"
set rc 0
msg-checking "[join $descr] ... "
if {[proj-opt-truthy $optName]} {
if {0 eq $invert} {
set rc 1
} else {
set rc 0
}
} elseif {0 ne $invert} {
set rc 1
set rc [proj-opt-truthy $optName]
if {$invert} {
set rc [expr {!$rc}]
}
msg-result $rc
define $defName $rc
@@ -1341,7 +1338,7 @@ proc proj-dump-defs-json {file args} {
# that [opt-value canonical] will return X if --alias=X is passed to
# configure.
#
# That said: autosetup's [opt-src] does support alias forms, but it
# That said: autosetup's [opt-str] does support alias forms, but it
# requires that the caller know all possible aliases. It's simpler, in
# terms of options handling, if there's only a single canonical name
# which each down-stream call of [opt-...] has to know.
@@ -1427,11 +1424,11 @@ proc proj-which-linenoise {dotH} {
# manner unless they are explicitly overridden at configure-time, in
# which case those overrides takes precedence.
#
# Each --XYZdir flag which is explicitly passed to configure is
# exported as-is, as are those which default to some top-level system
# directory, e.g. /etc or /var. All which derive from either $prefix
# or $exec_prefix are exported in the form of a Makefile var
# reference, e.g. libdir=${exec_prefix}/lib. Ergo, if
# Each autoconf-relvant --XYZ flag which is explicitly passed to
# configure is exported as-is, as are those which default to some
# top-level system directory, e.g. /etc or /var. All which derive
# from either $prefix or $exec_prefix are exported in the form of a
# Makefile var reference, e.g. libdir=${exec_prefix}/lib. Ergo, if
# --exec-prefix=FOO is passed to configure, libdir will still derive,
# at make-time, from whatever exec_prefix is passed to make, and will
# use FOO if exec_prefix is not overridden at make-time. Without this
@@ -1467,7 +1464,7 @@ proc proj-remap-autoconf-dir-vars {} {
}
# Maintenance reminder: the [join] call is to avoid {braces}
# around the output when someone passes in,
# e.g. --libdir=\${prefix}/foo/bar. The Debian package build
# e.g. --libdir=\${prefix}/foo/bar. Debian's SQLite package build
# script does that.
}
}
@@ -1501,11 +1498,11 @@ proc proj-env-file {flag {dflt ""}} {
# If none of those are set, $dflt is returned.
#
proc proj-get-env {var {dflt ""}} {
return [get-env $var [proj-env-file $var $dflt]]
get-env $var [proj-env-file $var $dflt]
}
#
# @proj-current-scope ?lvl?
# @proj-scope ?lvl?
#
# Returns the name of the _calling_ proc from ($lvl + 1) levels up the
# call stack (where the caller's level will be 1 up from _this_
@@ -1513,7 +1510,7 @@ proc proj-get-env {var {dflt ""}} {
# returned and if it would be negative then a string indicating such
# is returned (as opposed to throwing an error).
#
proc proj-current-scope {{lvl 0}} {
proc proj-scope {{lvl 0}} {
#uplevel [expr {$lvl + 1}] {lindex [info level 0] 0}
set ilvl [info level]
set offset [expr {$ilvl - $lvl - 1}]
@@ -1526,6 +1523,14 @@ proc proj-current-scope {{lvl 0}} {
}
}
#
# Deprecated name of [proj-scope].
#
proc proj-current-scope {{lvl 0}} {
puts stderr \
"Deprecated proj-current-scope called from [proj-scope 1]. Use proj-scope instead."
proj-scope [incr lvl]
}
#
# Converts parts of tclConfig.sh to autosetup [define]s.
@@ -1547,6 +1552,7 @@ proc proj-tclConfig-sh-to-autosetup {tclConfigSh} {
TCL_VERSION
TCL_MAJOR_VERSION
TCL_MINOR_VERSION
TCL_PACKAGE_PATH
TCL_PATCH_LEVEL
TCL_SHLIB_SUFFIX
}
@@ -1563,22 +1569,7 @@ proc proj-tclConfig-sh-to-autosetup {tclConfigSh} {
lappend shBody "exit"
set shBody [join $shBody "\n"]
#puts "shBody=$shBody\n"; exit
if {0} {
# This doesn't work but would be preferable to using a temp file...
set fd [open "| sh" "rw"]
#puts "fd = $fd"; exit
puts $fd $shBody
flush $fd
set rd [read $fd]
close $fd
puts "rd=$rd"; exit 1
eval $rd
} else {
set shName ".tclConfigSh.tcl"
proj-file-write $shName $shBody
eval [exec sh $shName $tclConfigSh]
file delete -force $shName
}
eval [exec echo $shBody | sh]
}
#
@@ -1635,7 +1626,7 @@ proc proj-tweak-default-env-dirs {} {
# If $postProcessScript is not empty then, during
# [proj-dot-ins-process], it will be eval'd immediately after
# processing the file. In the context of that script, the vars
# $fileIn and $fileOut will be set to the input and output file
# $dotInsIn and $dotInsOut will be set to the input and output file
# names. This can be used, for example, to make the output file
# executable or perform validation on its contents.
#
@@ -1657,7 +1648,7 @@ proc proj-dot-ins-append {fileIn args} {
proj-fatal "Too many arguments: $fileIn $args"
}
}
#puts "******* [proj-current-scope]: adding $fileIn"
#puts "******* [proj-scope]: adding $fileIn"
lappend ::proj__Config(dot-in-files) $fileIn
}
@@ -1701,7 +1692,7 @@ proc proj-dot-ins-process {args} {
-validate 0 {expr 1}
}
if {[llength $args] > 0} {
error "Invalid argument to [proj-current-scope]: $args"
error "Invalid argument to [proj-scope]: $args"
}
foreach f $::proj__Config(dot-in-files) {
proj-assert {3==[llength $f]} \
@@ -1713,7 +1704,10 @@ proc proj-dot-ins-process {args} {
proj-validate-no-unresolved-ats $fOut
}
if {"" ne $fScript} {
uplevel 1 "set fileIn $fIn; set fileOut $fOut; eval {$fScript}"
uplevel 1 [join [list set dotInsIn $fIn \; \
set dotInsOut $fOut \; \
eval \{${fScript}\} \; \
unset dotInsIn dotInsOut]]
}
}
if {$flags(-clear)} {
@@ -1752,13 +1746,13 @@ proc proj-validate-no-unresolved-ats {args} {
}
#
# @proj-first-found fileList tgtVar
# @proj-first-file-found tgtVar fileList
#
# Searches $fileList for an existing file. If one is found, its name is
# assigned to tgtVar and 1 is returned, else tgtVar is not modified
# Searches $fileList for an existing file. If one is found, its name
# is assigned to tgtVar and 1 is returned, else tgtVar is set to ""
# and 0 is returned.
#
proc proj-first-file-found {fileList tgtVar} {
proc proj-first-file-found {tgtVar fileList} {
upvar $tgtVar tgt
foreach f $fileList {
if {[file exists $f]} {
@@ -1766,6 +1760,7 @@ proc proj-first-file-found {fileList tgtVar} {
return 1
}
}
set tgt ""
return 0
}
@@ -1775,19 +1770,10 @@ proc proj-first-file-found {fileList tgtVar} {
# can be used to automatically reconfigure.
#
proc proj-setup-autoreconfig {defName} {
set squote {{arg} {
# Wrap $arg in single-quotes if it looks like it might need that
# to avoid mis-handling as a shell argument. We assume that $arg
# will never contain any single-quote characters.
if {[string match {*[ &;$*"]*} $arg]} { return '$arg' }
return $arg
}}
define-append $defName cd [apply $squote $::autosetup(builddir)] \
&& [apply $squote $::autosetup(srcdir)/configure]
#{*}$::autosetup(argv) breaks with --flag='val with spaces', so...
foreach arg $::autosetup(argv) {
define-append $defName [apply $squote $arg]
}
define $defName \
[join [list \
cd \"$::autosetup(builddir)\" \
&& [get-define AUTOREMAKE "error - missing @AUTOREMAKE@"]]]
}
#
@@ -1805,21 +1791,22 @@ proc proj-define-append {defineName args} {
}
#
# @prod-define-amend ?-p|-prepend? ?-define? FLAG args...
# @prod-define-amend ?-p|-prepend? ?-d|-define? defineName args...
#
# A proxy for Autosetup's [define-append].
#
# Appends all non-empty $args to the define named by $FLAG unless. If
# Appends all non-empty $args to the define named by $defineName. If
# one of (-p | -prepend) are used it instead prepends them, in their
# given order, to $FLAG.
# given order, to $defineName.
#
# If -define is used then each argument is assumed to be a [define]'d
# flag and [get-define X ""] is used to fetch it.
#
# Typically, -lXYZ flags need to be in "reverse" order, with each -lY
# resolving symbols for -lX's to its left. This order is largely
# historical, and not relevant on all environments, but it is
# technically correct and still relevant on some environments.
# Re. linker flags: typically, -lXYZ flags need to be in "reverse"
# order, with each -lY resolving symbols for -lX's to its left. This
# order is largely historical, and not relevant on all environments,
# but it is technically correct and still relevant on some
# environments.
#
# See: proj-append-to
#
@@ -1830,9 +1817,9 @@ proc proj-define-amend {args} {
set xargs [list]
foreach arg $args {
switch -exact -- $arg {
-p - -prepend { set prepend 1 }
-d - -define { set isdefs 1 }
"" {}
-p - -prepend { incr prepend }
-d - -define { incr isdefs }
default {
if {"" eq $defName} {
set defName $arg
@@ -1842,6 +1829,9 @@ proc proj-define-amend {args} {
}
}
}
if {"" eq $defName} {
proj-error "Missing defineName argument in call from [proj-scope 1]"
}
if {$isdefs} {
set args $xargs
set xargs [list]
@@ -1863,47 +1853,51 @@ proc proj-define-amend {args} {
}
#
# @proj-define-to-cflag ?-list? defineName...
# @proj-define-to-cflag ?-list? ?-quote? ?-zero-undef? defineName...
#
# Treat each argument as the name of a [define]
# and attempt to render it like a CFLAGS value:
# Treat each argument as the name of a [define] and renders it like a
# CFLAGS value in one of the following forms:
#
# -D$name
# -D$name=value
# -D$name=integer (strict integer matches only)
# '-D$name=value' (without -quote)
# '-D$name="value"' (with -quote)
#
# If treats integers as numbers and everything else as a quoted
# It treats integers as numbers and everything else as a quoted
# string, noting that it does not handle strings which themselves
# contain quotes.
#
# The -zero-undef flag causes no -D to be emitted for integer values
# of 0.
#
# By default it returns the result as string of all -D... flags,
# but if passed the -list flag it will return a list of the
# individual CFLAGS.
#
proc proj-define-to-cflag {args} {
set rv {}
set xargs {}
set returnList 0;
foreach arg $args {
switch -exact -- $arg {
-list {incr returnList}
default {
lappend xargs $arg
}
}
proj-parse-simple-flags args flags {
-list 0 {expr 1}
-quote 0 {expr 1}
-zero-undef 0 {expr 1}
}
foreach d $xargs {
foreach d $args {
set v [get-define $d ""]
set li [list -D${d}]
if {[string is integer -strict $v]} {
lappend li = $v
} elseif {"" eq $d} {
set li {}
if {"" eq $d} {
set v "-D${d}"
} elseif {[string is integer -strict $v]} {
if {!$flags(-zero-undef) || $v ne "0"} {
set v "-D${d}=$v"
}
} elseif {$flags(-quote)} {
set v "'-D${d}=\"$v\"'"
} else {
lappend li = {"} $v {"}
set v "'-D${d}=$v'"
}
lappend rv [join $li ""]
lappend rv $v
}
if {$returnList} { return $rv }
return [join $rv]
expr {$flags(-list) ? $rv : [join $rv]}
}
@@ -1967,7 +1961,7 @@ array set proj__Cache {}
# Returns a cache key for the given argument:
#
# integer: relative call stack levels to get the scope name of for
# use as a key. [proj-current-scope [expr {1 + $arg + addLevel}]] is
# use as a key. [proj-scope [expr {1 + $arg + addLevel}]] is
# then used to generate the key. i.e. the default of 0 uses the
# calling scope's name as the key.
#
@@ -1978,7 +1972,7 @@ array set proj__Cache {}
proc proj-cache-key {{addLevel 0} arg} {
if {"-" eq $arg} {set arg 0}
if {[string is integer -strict $arg]} {
return [proj-current-scope [expr {$arg + $addLevel + 1}]]
return [proj-scope [expr {$arg + $addLevel + 1}]]
}
return $arg
}
@@ -2064,7 +2058,7 @@ proc proj-coalesce {args} {
# -flag defaultValue {script}
#
# -flag => defaultValue
# -----^--^ (wiith spaces there!)
# -----^--^ (with spaces there!)
#
# Repeated for each flag.
#
@@ -2096,8 +2090,8 @@ proc proj-coalesce {args} {
# This function assumes that each flag is unique, and using a flag
# more than once behaves in a last-one-wins fashion.
#
# Any $argv entries not described in $prototype are not treated
# as flags.
# Any argvName entries not described in $prototype are not treated as
# flags.
#
# Returns the number of flags it processed in $argvName.
#
@@ -2113,6 +2107,10 @@ proc proj-coalesce {args} {
# After that $flags would contain {-foo 1 -bar {blah} -no-baz 2}
# and $args would be {8 9 10}.
#
# Potential TODOs: consider using lappend instead of set so that any
# given flag can be used more than once. Or add a syntax to indicate
# that.
#
proc proj-parse-simple-flags {argvName tgtArrayName prototype} {
upvar $argvName argv
upvar $tgtArrayName tgt
@@ -2121,26 +2119,28 @@ proc proj-parse-simple-flags {argvName tgtArrayName prototype} {
array set consuming {}
set n [llength $prototype]
# Figure out what our flags are...
for {set i 0} {$i < $n} {} {
for {set i 0} {$i < $n} {incr i} {
set k [lindex $prototype $i]
#puts "**** #$i of $n k=$k"
proj-assert {[string match -* $k]} \
"Invalid flag value for [proj-current-scope]: $k"
"Invalid flag value: $k"
set v ""
set s ""
if {"=>" eq [lindex $prototype [expr {$i + 1}]]} {
incr i 2
if {$i >= $n} {
proj-fatal "Missing argument for $k => flag"
switch -exact -- [lindex $prototype [expr {$i + 1}]] {
=> {
incr i 2
if {$i >= $n} {
proj-error "Missing argument for $k => flag"
}
set consuming($k) 1
set v [lindex $prototype $i]
}
default {
set v [lindex $prototype [incr i]]
set s [lindex $prototype [incr i]]
set scripts($k) $s
}
set consuming($k) 1
set v [lindex $prototype $i]
} else {
set v [lindex $prototype [incr i]]
set s [lindex $prototype [incr i]]
set scripts($k) $s
}
incr i
#puts "**** #$i of $n k=$k v=$v s=$s"
set dflt($k) $v
}
@@ -2160,7 +2160,7 @@ proc proj-parse-simple-flags {argvName tgtArrayName prototype} {
} elseif {[info exists tgt($arg)]} {
if {[info exists consuming($arg)]} {
if {$i + 1 >= $n} {
proj-fatal "Missing argument for $arg flag"
proj-assert 0 {Cannot happen - bounds already checked}
}
set tgt($arg) [lindex $argv [incr i]]
} elseif {"" eq $scripts($arg)} {

View File

@@ -538,7 +538,7 @@ proc msg-debug {msg} {
# the debug message. It is not legal to call this from the global
# scope.
proc proc-debug {msg} {
msg-debug "\[[proj-current-scope 1]\]: $msg"
msg-debug "\[[proj-scope 1]\]: $msg"
}
define OPT_FEATURE_FLAGS {} ; # -DSQLITE_OMIT/ENABLE flags.
@@ -994,7 +994,7 @@ proc sqlite-handle-emsdk {} {
proj-bin-define wasm-opt BIN_WASM_OPT
}
proj-dot-ins-append $emccSh.in $emccSh {
catch {exec chmod u+x $fileOut}
catch {exec chmod u+x $dotInsOut}
}
proj-dot-ins-append $extWasmConfig.in $extWasmConfig
} else {

View File

@@ -15,6 +15,7 @@
<li> 3.1. <a href=#commands_to_run_tests>Commands to Run SQLite Tests</a>
<li> 3.2. <a href=#zipvfs_tests>Running ZipVFS Tests</a>
<li> 3.3. <a href=#source_code_test_failures>Investigating Source Code Test Failures</a>
<li> 3.4. <a href=#fuzzdb>External Fuzzcheck Databases</a>
</ul>
<li> 4. <a href=#testrunner_options>Extra testrunner.tcl Options</a>
<li> 5. <a href=#cpu_cores>Controlling CPU Core Utilization</a>
@@ -23,47 +24,62 @@
<a name=overview></a>
# 1. Overview
testrunner.tcl is a Tcl script used to run multiple SQLite tests using
multiple jobs. It supports the following types of tests:
The testrunner.tcl program is a Tcl script used to run multiple SQLite
tests in parallel, thus reducing testing time on multi-core machines.
It supports the following types of tests:
* Tcl test scripts.
* Fuzzcheck tests, including using an external fuzzcheck database.
* Tests run with `make` commands. Examples:
- `make mdevtest`
- `make devtest`
- `make releasetest`
- `make sdevtest`
- `make testrunner`
testrunner.tcl pipes the output of all tests and builds run into log file
**testrunner.log**, created in the current working directory. Search this
file to find details of errors. Suggested search commands:
The testrunner.tcl program stores output of all tests and builds run in
log file **testrunner.log**, created in the current working directory.
Search this file to find details of errors. Suggested search commands:
* `grep "^!" testrunner.log`
* `grep failed testrunner.log`
testrunner.tcl also populates SQLite database **testrunner.db**. This database
contains details of all tests run, running and to be run. A useful query
might be:
The testrunner.tcl program also populates SQLite database **testrunner.db**.
This database contains details of all tests run, running and to be run.
A useful query might be:
```
SELECT * FROM script WHERE state='failed'
```
You can get a summary of errors in a prior run by invoking commands like
these:
```
tclsh $(TESTDIR)/testrunner.tcl errors
tclsh $(TESTDIR)/testrunner.tcl errors -v
```
Running the command:
```
./testfixture $(TESTDIR)/testrunner.tcl status
tclsh $(TESTDIR)/testrunner.tcl status
```
in the directory containing the testrunner.db database runs various queries
to produce a succinct report on the state of a running testrunner.tcl script.
Running:
A good way to keep and eye on test progress is to run either of the two
following commands:
```
watch ./testfixture $(TESTDIR)/testrunner.tcl status
watch tclsh $(TESTDIR)/testrunner.tcl status
tclsh $(TESTDIR)/testrunner.tcl status -d 2
```
in another terminal is a good way to keep an eye on a long running test.
Both of the commands above accomplish about the same thing, but the second
one has the advantage of not requiring "watch" to be installed on your
system.
Sometimes testrunner.tcl uses the `testfixture` binary that it is run with
to run tests (see "Binary Tests" below). Sometimes it builds testfixture and
@@ -239,9 +255,7 @@ of the specific tests run.
As with <a href=#source code tests>source code tests</a>, one or more patterns
may be appended to any of the above commands (mdevtest, sdevtest or release).
In that case only Tcl tests (no fuzz or other tests) that match the specified
pattern are run. For example, to run the just the Tcl rtree tests in all
builds and configurations supported by "release":
Pattern matching is used for both Tcl tests and fuzz tests.
```
tclsh $TESTDIR/testrunner.tcl release rtree%
@@ -292,6 +306,34 @@ target to build. This may be used either to run a `make` command test directly,
or else to build a testfixture (or testfixture.exe) binary with which to
run a Tcl test script, as <a href=#binary_test_failures>described above</a>.
<a name=fuzzdb></a>
## 3.4 External Fuzzcheck Databases
Testrunner.tcl will also run fuzzcheck against an external (out of tree)
database, for example fuzzcheck databases generated by dbsqlfuzz. To do
this, simply add the "`--fuzzdb` *FILENAME*" command-line option or set
the FUZZDB environment variable to the name of the external
database. For large external databases, testrunner.tcl will automatically use
the "`--slice`" command-line option of fuzzcheck to divide the work up into
multiple jobs, to increase parallelism.
Thus, for example, to run a full releasetest including an external
dbsqlfuzz database, run a command like one of these:
```
tclsh test/testrunner.tcl releasetest --fuzzdb ../fuzz/20250415.db
FUZZDB=../fuzz/20250415.db make releasetest
nmake /f Makefile.msc FUZZDB=../fuzz/20250415.db releasetest
```
The patternlist option to testrunner.tcl will match against fuzzcheck
databases. So if you want to run *only* tests involving the external
database, you can use a command something like this:
```
tclsh test/testrunner.tcl releasetest 20250415 --fuzzdb ../fuzz/20250415.db
```
<a name=testrunner_options></a>
# 4. Extra testrunner.tcl Options
@@ -315,16 +357,22 @@ would normally execute into the testrunner.log file. Example:
tclsh $TESTDIR/testrunner.tcl --dryrun mdevtest"
```
The **--explain** option is similar to --dryrun in that it prevents testrunner.tcl
from building any binaries or running any tests. The difference is that --explain
prints on standard output a human-readable summary of all the builds and tests that
would have been run.
The **--explain** option is similar to --dryrun in that it prevents
testrunner.tcl from building any binaries or running any tests. The
difference is that --explain prints on standard output a human-readable
summary of all the builds and tests that would have been run.
```
# Show what builds and tests would have been run
tclsh $TESTDIR/testrunner.tcl --explain mdevtest
```
The **--status** option uses VT100 escape sequences to display the test
status full-screen. This is similar to running
"`watch test/testrunner status`" in a separate window, just more convenient.
Unfortunately, this option does not work correctly on Windows, due to the
sketchy implementation of VT100 escapes on the Windows console.
<a name=cpu_cores></a>
# 5. Controlling CPU Core Utilization

View File

@@ -528,9 +528,12 @@ do_execsql_test 16.0 {
BEGIN EXCLUSIVE;
}
sqlite3 db2 test.db
do_test 16.1 {
catchsql { SELECT * FROM t1 } db2
set rc [catch {
sqlite3 db2 test.db
db2 eval {SELECT * FROM t1}
} errmsg]
lappend rc $errmsg
} {1 {database is locked}}
do_execsql_test 16.2 {
@@ -542,4 +545,3 @@ do_test 16.3 {
} {0 {}}
finish_test

113
main.mk
View File

@@ -2245,119 +2245,6 @@ fuzzcheck-ubsan$(T.exe): $(FUZZCHECK_SRC) sqlite3.c sqlite3.h $(FUZZCHECK_DEP)
fuzzy: fuzzcheck-ubsan$(T.exe)
xbin: fuzzcheck-ubsan$(T.exe)
# Usage: FUZZDB=filename make run-fuzzcheck
#
# Where filename is a fuzzcheck database, this target builds and runs
# fuzzcheck, fuzzcheck-asan, and fuzzcheck-ubsan on that database.
#
# FUZZDB can be a glob pattern of two or more databases. Example:
#
# FUZZDB=test/fuzzdata*.db make run-fuzzcheck
#
# The original rules for this target were like this:
#
# run-fuzzcheck: fuzzcheck$(T.exe) fuzzcheck-asan$(T.exe) fuzzcheck-ubsan$(T.exe)
# @if test "$(FUZZDB)" = ""; then echo 'ERROR: No FUZZDB specified. Rerun with FUZZDB=filename'; exit 1; fi
# ./fuzzcheck$(T.exe) --spinner $(FUZZDB)
# ./fuzzcheck-asan$(T.exe) --spinner $(FUZZDB)
# ./fuzzcheck-ubsan$(T.exe) --spinner $(FUZZDB)
#
# What follows is a decomposition of these rules in a way that allows make
# to run things in parallel when using the -jN option.
#
FUZZDB-check:
@if test "$(FUZZDB)" = ""; then echo 'ERROR: No FUZZDB specified. Rerun with FUZZDB=filename'; exit 1; fi
run-fuzzcheck: run-fuzzcheck-n0
run-fuzzcheck-n0: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 0 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-n1
run-fuzzcheck-n1: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 1 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-n2
run-fuzzcheck-n2: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 2 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-n3
run-fuzzcheck-n3: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 3 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-n4
run-fuzzcheck-n4: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 4 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-n5
run-fuzzcheck-n5: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 5 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-n6
run-fuzzcheck-n6: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 6 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-n7
run-fuzzcheck-n7: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 7 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-n8
run-fuzzcheck-n8: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 8 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-n9
run-fuzzcheck-n9: FUZZDB-check fuzzcheck$(T.exe)
./fuzzcheck$(T.exe) --slice 9 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a0
run-fuzzcheck-a0: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 0 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a1
run-fuzzcheck-a1: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 1 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a2
run-fuzzcheck-a2: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 2 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a3
run-fuzzcheck-a3: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 3 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a4
run-fuzzcheck-a4: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 4 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a5
run-fuzzcheck-a5: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 5 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a6
run-fuzzcheck-a6: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 6 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a7
run-fuzzcheck-a7: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 7 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a8
run-fuzzcheck-a8: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 8 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-a9
run-fuzzcheck-a9: FUZZDB-check fuzzcheck-asan$(T.exe)
./fuzzcheck-asan$(T.exe) --slice 9 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u0
run-fuzzcheck-u0: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 0 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u1
run-fuzzcheck-u1: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 1 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u2
run-fuzzcheck-u2: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 2 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u3
run-fuzzcheck-u3: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 3 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u4
run-fuzzcheck-u4: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 4 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u5
run-fuzzcheck-u5: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 5 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u6
run-fuzzcheck-u6: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 6 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u7
run-fuzzcheck-u7: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 7 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u8
run-fuzzcheck-u8: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 8 10 $(FUZZDB)
run-fuzzcheck: run-fuzzcheck-u9
run-fuzzcheck-u9: FUZZDB-check fuzzcheck-ubsan$(T.exe)
./fuzzcheck-ubsan$(T.exe) --slice 9 10 $(FUZZDB)
ossshell$(T.exe): $(TOP)/test/ossfuzz.c $(TOP)/test/ossshell.c sqlite3.c sqlite3.h
$(T.link) -o $@ $(FUZZCHECK_OPT) $(TOP)/test/ossshell.c \

View File

@@ -1,12 +1,12 @@
C Merge\sthe\slatest\strunk\senhancements\sinto\sthe\swal2\sbranch.
D 2025-04-21T12:56:24.808
C Merge\sthe\slatest\strunk\schanges\sinto\sthe\swal2\sbranch.
D 2025-05-06T21:38:45.322
F .fossil-settings/binary-glob 61195414528fb3ea9693577e1980230d78a1f8b0a54c78cf1b9b24d0a409ed6a x
F .fossil-settings/empty-dirs dbb81e8fc0401ac46a1491ab34a7f2c7c0452f2f06b54ebb845d024ca8283ef1
F .fossil-settings/ignore-glob 35175cdfcf539b2318cb04a9901442804be81cd677d8b889fcc9149c21f239ea
F LICENSE.md e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d
F Makefile.in c3e414df4dc8dfb12f1f6baf129fcb6d18cd0ebd3c9109370fb3fceeeef9a37a
F Makefile.linux-generic bd3e3cacd369821a6241d4ea1967395c962dfe3057e38cb0a435cee0e8b789d0
F Makefile.msc 7f78b2f5817694e7b75d1ed9480b8efbe9e7576068b8e91797e6ed99867b7846
F Makefile.msc 3e6234d032b455f988a353e9c3e29c34f5d1feab80ea78426b7498affdc47350
F README.md e28077cfbef795e99c9c75ed95aa7257a1166709b562076441a8506ac421b7c1
F VERSION 001dea55eb8304ec9130b6b44a32d3fc349f279d45a7e224fc0730c3cb8e2372
F art/icon-243x273.gif 9750b734f82fdb3dc43127753d5e6fbf3b62c9f4e136c2fbf573b2f57ea87af5
@@ -18,24 +18,24 @@ F art/sqlite370.svg 40b7e2fe8aac3add5d56dd86ab8d427a4eca5bcb3fe4f8946cb3794e1821
F auto.def 82c32443a91c1062f7a48beec37dbb2d8d03447b1286bce8df5ebb6d8d353f8a
F autoconf/Makefile.fallback 22fe523eb36dfce31e0f6349f782eb084e86a5620b2b0b4f84a2d6133f53f5ac
F autoconf/Makefile.in 36516827bb5d2309422bbcbf53e873fa22ef179f5c25b1d3dc6a7255e63270b7
F autoconf/Makefile.msc 5bc67d3912444c40c6f96d003e5c90663e51abb83d204a520110b1b2038dcd8b
F autoconf/Makefile.msc f15ad424ca2820df8e39d9157965710af0a64d87773706706a12ea4f96e3a0d8
F autoconf/README.first f1d3876e9a7852c22f275a6f06814e64934cecbc0b5b9617d64849094c1fd136
F autoconf/README.txt b749816b8452b3af994dc6d607394bef3df1736d7e09359f1087de8439a52807
F autoconf/auto.def 3d994f3a9cc9b712dbce92a5708570ddcf3b988141b6eb738f2ed16127a9f0ac
F autoconf/tea/Makefile.in 41159d167c3b1a9bab7253f268d1ad1dc71ab8a9a6ccc9744a313aba40249a6a
F autoconf/tea/Makefile.in 8c00e2ed350754d6b45681318ed7e4578aed8ad732abcac0593c1b10dc29e5a6
F autoconf/tea/README.txt 656d4686c509d375f5988ff3deda94f65fe6cd8358cd55d1f1dcc7b6e2ff73aa
F autoconf/tea/auto.def 81e2617cfb90d53c19b53b3ec632cd2893bf32f2e5dd272b1116fadf2ea86c2d
F autoconf/tea/auto.def ce95b9450e2fa4ba5dc857e208fe10f4e6f2d737796ac3278aee6079db417529
F autoconf/tea/autosetup/README.txt b40071e6f8506500a2f7f71d5fc69e0bf87b9d7678dd9da1e5b4d0acbf40b1ca
F autoconf/tea/autosetup/core.tcl dea4684df6c5765a8fa866b4a6d79c73ba413ad37bb7c70e1be3631cd9eb2831
F autoconf/tea/autosetup/feature-tests.tcl 307cac35806817fc87bd0d92004364ee682c0e99f2ab505291500493ab6c7a5f
F autoconf/tea/autosetup/tester.tcl a201ef9e91dde182e73015d09f94a3c40dd534ce38642167ebfd5884801d1e5a
F autoconf/tea/autosetup/core.tcl 7d942639871111e2fcef571c9d5a6e2dc75972eb214cf814a6b99f1e2b25182f
F autoconf/tea/autosetup/feature-tests.tcl 18194fb79a24d30e5bbdeab40999616f39278b53a27525349ded033af2fd73be
F autoconf/tea/autosetup/tester.tcl c293695a0ab5d9e8d0ceeb0ee422f90e8a6aa9f0c7c51acd0b6d9f09d8edfed3
F autoconf/tea/configure d0b12b984edca6030d1976375b80157ac78b5b90a5b4f0dcee39357f63f4a80b x
F autoconf/tea/doc/sqlite3.n 9a97f4f717ceab73004ea412af7960625c1cb24b5c25e4ae4c8b5d8fa4300f4e
F autoconf/tea/license.terms 13bd403c9610fd2b76ece0ab50c4c5eda933d523
F autoconf/tea/pkgIndex.tcl.in a41b98633e184703136914e588638423047eae71244a30061ccd8edf92498436
F autoconf/tea/teaish.tcl 2bc7ca287baa076daca513ff9342c00f499b1cceb8302a93d4a6231b00b52504
F autoconf/tea/pkgIndex.tcl.in e07da6b94561f4aa382bab65b1ccceb04701b97bf59d007c1d1f20a222b22d07
F autoconf/tea/teaish.tcl 8e124f33cbaf9309f3e49be4e7018a03b8f3a52f8c8d9e1e5419f4f7b0eae59e
F autoconf/tea/teaish.test.tcl cfe94e1fb79dd078f650295be59843d470125e0cc3a17a1414c1fb8d77f4aea6
F autoconf/tea/teaish.tester.tcl.in 63059e35289ac663c7d0052e6c0089a309fee75225e86e4ec5b3d9f2c1d9290a
F autoconf/tea/teaish.tester.tcl.in 31ac5b7b1e226b7e1bfc6b578a6c1a51550306ef7afae5949eec046df006ca7d
F autosetup/LICENSE 41a26aebdd2cd185d1e2b210f71b7ce234496979f6b35aef2cbf6b80cbed4ce4
F autosetup/README.autosetup a78ff8c4a3d2636a4268736672a74bf14a82f42687fcf0631a70c516075c031e
F autosetup/README.md f324bb9f9bf1cc787122034df53fbfdfed28ee2657e6652b763d992ab0d04829
@@ -48,11 +48,11 @@ F autosetup/cc-db.tcl 6e0ed90146197a5a05b245e649975c07c548e30926b218ca3e1d4dc034
F autosetup/cc-lib.tcl 493c5935b5dd3bf9bd4eca89b07c8b1b1a9356d61783035144e21795facf7360
F autosetup/cc-shared.tcl 4f024e94a47f427ba61de1739f6381ef0080210f9fae89112d5c1de1e5460d78
F autosetup/cc.tcl c0fcc50ca91deff8741e449ddad05bcd08268bc31177e613a6343bbd1fd3e45f
F autosetup/find_tclconfig.tcl e64886ffe3b982d4df42cd28ed91fe0b5940c2c5785e126c1821baf61bc86a7e w tool/find_tclconfig.tcl
F autosetup/jimsh0.c a57c16e65dcffc9c76e496757cb3f7fb47e01ecbd1631a0a5e01751fc856f049
F autosetup/find_tclconfig.tcl e64886ffe3b982d4df42cd28ed91fe0b5940c2c5785e126c1821baf61bc86a7e
F autosetup/jimsh0.c 563b966c137a4ce3c9333e5196723b7ac0919140a9d7989eb440463cd855c367
F autosetup/pkg-config.tcl 4e635bf39022ff65e0d5434339dd41503ea48fc53822c9c5bde88b02d3d952ba
F autosetup/proj.tcl 11e46ff237ca96d07fd14fe96132f8e77fee6442d8baa2e808bbe95016583c56
F autosetup/sqlite-config.tcl bde169c42d5d5331485ae7785f5dd19ca1aa01315787e8306441c472ad4e4201
F autosetup/proj.tcl 0287234d817e800ab0e10d46bf98545ba5762edd69e5dd0e2902029a7e6c3555
F autosetup/sqlite-config.tcl 7ff986f6c3951f3aec5608522cbf772d8d04a0d26cc894289e2ca4836e018719
F autosetup/system.tcl 51d4be76cd9a9074704b584e5c9cbba616202c8468cf9ba8a4f8294a7ab1dba9
F configure 9a00b21dfd13757bbfb8d89b30660a89ec1f8f3a79402b8f9f9b6fc475c3303a x
F contrib/sqlitecon.tcl eb4c6578e08dd353263958da0dc620f8400b869a50d06e271ab0be85a51a08d3
@@ -64,7 +64,7 @@ F doc/jsonb.md ede3238186e3a90bb79d20b2a6a06d0f9429a38e069e9da0efbad0f2ed48b924
F doc/lemon.html 89ea833a6f71773ab1a9063fbb7fb9b32147bc0b1057b53ecab94a3b30c0aef5
F doc/pager-invariants.txt 83aa3a4724b2d7970cc3f3461f0295c46d4fc19a835a5781cbb35cb52feb0577
F doc/tcl-extension-testing.md b88861804fc1eaf83249f8e206334189b61e150c360e1b80d0dcf91af82354f5
F doc/testrunner.md 15583cf8c7d8a1c3378fd5d4319ca769a14c4d950a5df9b015d01d5be290dc69
F doc/testrunner.md 5ee928637e03f136a25fef852c5ed975932e31927bd9b05a574424ae18c31019
F doc/trusted-schema.md 33625008620e879c7bcfbbfa079587612c434fa094d338b08242288d358c3e8a
F doc/vdbesort-memory.md 4da2639c14cd24a31e0af694b1a8dd37eaf277aff3867e9a8cc14046bc49df56
F doc/vfs-shm.txt 1a55f3f0e7b6745931b117ba5c9df3640d7a0536f532ef0052563100f4416f86
@@ -206,7 +206,7 @@ F ext/fts5/test/fts5lastrowid.test f36298a1fb9f988bde060a274a7ce638faa9c38a31400
F ext/fts5/test/fts5leftjoin.test c0b4cafb9661379e576dc4405c0891d8fcc2782680740513c4d1fc114b43d4ad
F ext/fts5/test/fts5limits.test 8ab67cf5d311c124b6ceb0062d0297767176df4572d955fce79fa43004dff01c
F ext/fts5/test/fts5locale.test 83ba7ee12628b540d3098f39c39c1de0c0440eddff8f7512c8c698d0c4a3ae3c
F ext/fts5/test/fts5matchinfo.test 7806f6d521bb49bcb54fff88a50f137866f7000c96ccfd28500caa47b63cb0aa
F ext/fts5/test/fts5matchinfo.test bc9e74157773db7f00aec1e85587f1145956ebdf1672c136f0f04323b2752aa0
F ext/fts5/test/fts5merge.test 2654df0bcdb2d117c2d38b6aeb0168061be01c643f9e9194b36c43a2970e8082
F ext/fts5/test/fts5merge2.test 3ebad1a59d6ad3fb66eff6523a09e95dc6367cbefb3cd73196801dea0425c8e2
F ext/fts5/test/fts5misc.test f4dee7da898d605a6488c5b7afaace3158ed6bb9addff78faa1b37b402b77fb9
@@ -711,7 +711,7 @@ F ext/wasm/tests/opfs/sahpool/sahpool-pausing.js f264925cfc82155de38cecb3d204c36
F ext/wasm/tests/opfs/sahpool/sahpool-worker.js bd25a43fc2ab2d1bafd8f2854ad3943ef673f7c3be03e95ecf1612ff6e8e2a61
F ext/wasm/wasmfs.make 68999f5bd8c489239592d59a420f8c627c99169bbd6fa16a404751f757b9f702
F magic.txt 5ade0bc977aa135e79e3faaea894d5671b26107cc91e70783aa7dc83f22f3ba0
F main.mk a94af0553cc09c89632573a98de0dad3600c4da9225d8efa62f3b7840d7008c0
F main.mk 3bc2070edfdc2279d21a469cb74737cdcbd2f535fdaf9c284c1878f8fb5b31a2
F mptest/config01.test 3c6adcbc50b991866855f1977ff172eb6d901271
F mptest/config02.test 4415dfe36c48785f751e16e32c20b077c28ae504
F mptest/crash01.test 61e61469e257df0850df4293d7d4d6c2af301421
@@ -731,14 +731,14 @@ F src/btmutex.c 30dada73a819a1ef5b7583786370dce1842e12e1ad941e4d05ac29695528daea
F src/btree.c d1ca761240cb8db94f71d977aa83c80e07b589ca0f52cbf122b73fb032dcdf6c
F src/btree.h 18e5e7b2124c23426a283523e5f31a4bff029131b795bb82391f9d2f3136fc50
F src/btreeInt.h 9c0f9ea5c9b5f4dcaea18111d43efe95f2ac276cd86d770dce10fd99ccc93886
F src/build.c ad72c60b6e01053dea9d1e03f52de747138e4b0de1c73f17501ea307cf40a240
F src/build.c 67c1db4c5e89a8519fe9b6dafc287f6bc3627696b5b8536dc5e06db570d8c05f
F src/callback.c acae8c8dddda41ee85cfdf19b926eefe830f371069f8aadca3aa39adf5b1c859
F src/complete.c a3634ab1e687055cd002e11b8f43eb75c17da23e
F src/date.c 9db4d604e699a73e10b8e85a44db074a1f04c0591a77e2abfd77703f50dce1e9
F src/dbpage.c fcb1aafe00872a8aff9a7aa0ef7ff1b01e5817ec7bbd521f8f3e1e674ac8d609
F src/dbstat.c 73362c0df0f40ad5523a6f5501224959d0976757b511299bf892313e79d14f5c
F src/delete.c 03a77ba20e54f0f42ebd8eddf15411ed6bdb06a2c472ac4b6b336521bf7cea42
F src/expr.c 61c3baab38f1b50eb4696e1f37c8f7ae1d1ecbfc1a35d446cfd1886624784131
F src/expr.c 6f184da1f36576ad1ecc48a03f14774235373c64f88d462c710834930ee6c145
F src/fault.c 460f3e55994363812d9d60844b2a6de88826e007
F src/fkey.c 928ed2517e8732113d2b9821aa37af639688d752f4ea9ac6e0e393d713eeb76f
F src/func.c 7686ea382b20e8bfe2ab9de76150c99ee7b6e83523561f3c7787e0f68cb435c2
@@ -748,7 +748,7 @@ F src/hash.h 46b92795a95bfefb210f52f0c316e9d7cdbcdd7e7fcfb0d8be796d3a5767cddf
F src/hwtime.h f9c2dfb84dce7acf95ce6d289e46f5f9d3d1afd328e53da8f8e9008e3b3caae6
F src/in-operator.md 10cd8f4bcd225a32518407c2fb2484089112fd71
F src/insert.c d05934dfab2c5c0c480fc6fd2038f11215661de08ea6ff38d2563216bd555c1b
F src/json.c d0e1c62a9689018fe5876b1a1eb574e4ab7a40e0928e0e5adda8b3860c70e1b8
F src/json.c c84b0f2bae967341d5a035808d21d0619b23b6e054ceac08e7592cd87f04ed4f
F src/legacy.c d7874bc885906868cd51e6c2156698f2754f02d9eee1bae2d687323c3ca8e5aa
F src/loadext.c 7432c944ff197046d67a1207790a1b13eec4548c85a9457eb0896bb3641dfb36
F src/main.c 07f78d917ffcdf327982840cfd8e855fd000527a2ea5ace372ce4febcbd0bf97
@@ -788,7 +788,7 @@ F src/random.c 606b00941a1d7dd09c381d3279a058d771f406c5213c9932bbd93d5587be4b9c
F src/resolve.c 20e1fbe8f840ffc0cd835e33f68a802a22e34faa918d7a269f3de242fda02f99
F src/rowset.c 8432130e6c344b3401a8874c3cb49fefe6873fec593294de077afea2dce5ec97
F src/select.c 1a5956231f7c57571288eaad61e5c37aaf0f3acb5c8a5ea0b896938166b62fa2
F src/shell.c.in 1e8b9bf369e80cdf9b029142e773038bc12bd38aea1c56df4af6bf7b46cae955
F src/shell.c.in 2c904da4431fed365e7d5029b8cb2da46cb3e8cf8a09305d1478836a2301ea96
F src/sqlite.h.in 22882ddd3a70751aa8864c81993ee4562ed54c2c508b6270f75e223ffee38e1b
F src/sqlite3.rc 015537e6ac1eec6c7050e17b616c2ffe6f70fca241835a84a4f0d5937383c479
F src/sqlite3ext.h 3f046c04ea3595d6bfda99b781926b17e672fd6d27da2ba6d8d8fc39981dcb54
@@ -856,11 +856,11 @@ F src/upsert.c 215328c3f91623c520ec8672c44323553f12caeb4f01b1090ebdca99fdf7b4f1
F src/utf.c 3a20cbae9688af4c1e3754cc2520189d00762e37f60c2deb0b303360d166bba6
F src/util.c 36fb1150062957280777655976f3f9a75db236cb8207a0770ceae8d5ec17fcd3
F src/vacuum.c d580ceb395c1ae3d59da41cbfea60683ff7dd2b94ddf4d0f5657620159e2eeb7
F src/vdbe.c 3db56c6704aa937627b5707f137ecb5c0c7c853fc7d4b4ad3d64022a484d433a
F src/vdbe.c ca31e2e5b3f4cdb2dd3ac5c1be3194d71d01e42df1de217c558acf0acbb7ecd2
F src/vdbe.h 31eddcffc1d14c76c2a20fe4e137e1ee43d44f370896fae14a067052801a3625
F src/vdbeInt.h 5446f60e89b2aa7cdf3ab0ec4e7b01b8732cd9d52d9092a0b8b1bf700768f784
F src/vdbeapi.c 28fab30ed0acc981aecfdcaab0a421503609078e29850eb28494816682baf0a7
F src/vdbeaux.c fc45bf49e6aa5168e77f3b5c4b500feab14ea995d3e444cb4ae557d8404ca251
F src/vdbeaux.c a976da0c291f1c38c6911327edb35a82198b70fc567a05cbece51bde83b9bd06
F src/vdbeblob.c b1b4032cac46b41e44b957c4d00aee9851f862dfd85ecb68116ba49884b03dfd
F src/vdbemem.c e67d9c6484d868c879d20c70d00bf4a9058082f1d4058607ca15d50eb3aebc21
F src/vdbesort.c 49e366d0216c782eba287bf602384e4330d2526a22f1275492d2785ce103c79b
@@ -979,7 +979,7 @@ F test/bind2.test 918bc35135f4141809ead7585909cde57d44db90a7a62aef540127148f91aa
F test/bindxfer.test efecd12c580c14df5f4ad3b3e83c667744a4f7e0
F test/bitvec.test 75894a880520164d73b1305c1c3f96882615e142
F test/blob.test e7ac6c7d3a985cc4678c64f325292529a69ae252
F test/bloom1.test 04f3a17df8912bfdc292c41b59d79f93893fe69799f3089a64451f9112f9658f
F test/bloom1.test 3b6277a647ac503b5d5df331037b0c01c40e88cc9537b94eaf2d8aa334ed4c8f
F test/boundary1.tcl 6421b2d920d8b09539503a8673339d32f7609eb1
F test/boundary1.test 66d7f4706ccdb42d58eafdb081de07b0eb42d77b
F test/boundary2.tcl e34ef4e930cf1083150d4d2c603e146bd3b76bcb
@@ -1007,7 +1007,7 @@ F test/cast.test a2a3b32df86e3c0601ffa2e9f028a18796305d251801efea807092dbf374a04
F test/cffault.test 9d6b20606afe712374952eec4f8fd74b1a8097ef
F test/changes.test 4377d202a487f66fc2822c1bf57c46798c8b2caf7446f4f701723b1dbb6b86f6
F test/changes2.test 07949edcc732af28cb54276bfb7d99723bccc1e905a423648bf57ac5cb0dc792
F test/check.test 56e4ed457e9f8683b9fc56f5b964f461f6e8a8dd5a13f3d495408215d66419ed
F test/check.test 3a7972ccbaad80d496833da8714d69d9d5d4ce9e7211af1cd2a06ae488a7de12
F test/checkfault.test da6cb3d50247169efcb20bdf57863a3ccfa1d27d9e55cd324f0680096970f014
F test/chunksize.test faea11c5d6df9d392252a8dd879e1b1d68c9d3e8b7909cbed8bcec3b60c706f1
F test/cksumvfs.test 6f05dc95847c06a3dc10eee6b5ab1351d78314a52d0db15717c9388f4cb96646
@@ -1736,8 +1736,8 @@ F test/temptable2.test 76821347810ecc88203e6ef0dd6897b6036ac788e9dd3e6b04fd4d163
F test/temptable3.test d11a0974e52b347e45ee54ef1923c91ed91e4637
F test/temptrigger.test 38f0ca479b1822d3117069e014daabcaacefffcc
F test/tester.tcl c882b91b16a9958faeb931c5a0a22eb475ed3c7610d400f1c1ea898882b1768e
F test/testrunner.tcl 228732ca62ea60e97785e13528bd36627cf3c115d59364a5181ed7f85fd0031d x
F test/testrunner_data.tcl 07f2d36a6321bfd196975aca237db6bba1991032809264a1abeb62e697196d20
F test/testrunner.tcl 614c4a28f7f730acd7bec53e17d76602fb480e0d538b6ec548169e03a093f92d x
F test/testrunner_data.tcl 8d5fa3851c48bc94e26db0be325202e44f6ca4ed838272b8d5b10c23817621e6
F test/thread001.test a0985c117eab62c0c65526e9fa5d1360dd1cac5b03bde223902763274ce21899
F test/thread002.test c24c83408e35ba5a952a3638b7ac03ccdf1ce4409289c54a050ac4c5f1de7502
F test/thread003.test ee4c9efc3b86a6a2767516a37bd64251272560a7
@@ -2157,7 +2157,6 @@ F tool/genfkey.README e550911fa984c8255ebed2ef97824125d83806eb5232582700de949edf
F tool/genfkey.test b6afd7b825d797a1e1274f519ab5695373552ecad5cd373530c63533638a5a4f
F tool/getlock.c f4c39b651370156cae979501a7b156bdba50e7ce
F tool/index_usage.c f62a0c701b2c7ff2f3e21d206f093c123f222dbf07136a10ffd1ca15a5c706c5
F tool/kvtest-speed.sh 4761a9c4b3530907562314d7757995787f7aef8f
F tool/lemon.c f51a488369046cd4f4212d755a214a57673ded400cbeb01e298cbf63539e1d8c
F tool/lempar.c bdffd3b233a4e4e78056c9c01fadd2bb3fe902435abde3bce3d769fdf0d5cca2
F tool/libvers.c caafc3b689638a1d88d44bc5f526c2278760d9b9
@@ -2177,7 +2176,6 @@ F tool/mkopts.tcl 680f785fdb09729fd9ac50632413da4eadbdf9071535e3f26d03795828ab07
F tool/mkpragmatab.tcl 3801ce32f8c55fe63a3b279f231fb26c2c1a2ea9a09d2dd599239d87a609acec
F tool/mkshellc.tcl 9ce74de0fa904a2c56a96f8d8b5261246bacb0eaa8d7e184f9e18ff94145ebbc
F tool/mksourceid.c 36aa8020014aed0836fd13c51d6dc9219b0df1761d6b5f58ff5b616211b079b9
F tool/mkspeedsql.tcl a1a334d288f7adfe6e996f2e712becf076745c97
F tool/mksqlite3c-noext.tcl 351c55256213154cabb051a3c870ef9f4487de905015141ae50dc7578a901b84
F tool/mksqlite3c.tcl f11b63445c4840509248bd4aa151a81aea25d5415fef71943c8d436eba4f3b3c
F tool/mksqlite3h.tcl 989948c6a26e188e673d7c2f2f093ea3acd816ad6ac65bab596280075c8f3a45
@@ -2193,7 +2191,6 @@ F tool/pagesig.c f98909b4168d9cac11a2de7f031adea0e2f3131faa7515a72807c03ec58eafe
F tool/replace.tcl 511c61acfe563dfb58675efb4628bb158a13d48ff8322123ac447e9d25a82d9a
F tool/restore_jrnl.tcl 1079ecba47cc82fa82115b81c1f68097ab1f956f357ee8da5fc4b2589af6bd98
F tool/rollback-test.c 9fc98427d1e23e84429d7e6d07d9094fbdec65a5
F tool/run-speed-test.sh df9686c0991ea7c617b2cb5467d89d34b561f198ab91cb87735e27030ede92e8
F tool/showdb.c 3956d71e5193162609a60e8c9edfcf09274c00cfea2b1d221261427adb2b5cca
F tool/showjournal.c 5bad7ae8784a43d2b270d953060423b8bd480818
F tool/showlocks.c 9cc5e66d4ebbf2d194f39db2527ece92077e86ae627ddd233ee48e16e8142564
@@ -2202,17 +2199,11 @@ F tool/showstat4.c 0682ebea7abf4d3657f53c4a243f2e7eab48eab344ed36a94bb75dcd19a5c
F tool/showwal.c 11eca547980a066b081f512636151233350ac679f29ecf4ebfce7f4530230b3d
F tool/soak1.tcl a3892082ed1079671565c044e93b55c3c7f38829aedf53cc597c65d23ffdaddf
F tool/spaceanal.tcl 1f83962090a6b60e1d7bf92495d643e622bef9fe82ea3f2d22350dcbce9a12d0
F tool/speed-check.sh 2d9e337449f8eb9f5ab4c1ce7433024e334ea03a68d48aa9caee6229c7cf0774
F tool/speedtest.tcl 06c76698485ccf597b9e7dbb1ac70706eb873355
F tool/speedtest16.c ecb6542862151c3e6509bbc00509b234562ae81e
F tool/speedtest2.tcl ee2149167303ba8e95af97873c575c3e0fab58ff
F tool/speedtest8.c 2902c46588c40b55661e471d7a86e4dd71a18224
F tool/speedtest8inst1.c 7ce07da76b5e745783e703a834417d725b7d45fd
F tool/spellsift.tcl 52b4b04dc4333c7ab024f09d9d66ed6b6f7c6eb00b38497a09f338fa55d40618 x
F tool/split-sqlite3c.tcl 07e18a1d8cc3f6b3a4a1f3528e63c9b29a5c8a7bca0b8d394b231da464ce1247
F tool/sqldiff.c 134be7866be19f8beb32043d5aea5657f01aaeae2df8d33d758ff722c78666b9
F tool/sqlite3_analyzer.c.in 14f02cb5ec3c264cd6107d1f1dad77092b1cf440fc196c30b69ae87b56a1a43b
F tool/sqlite3_rsync.c 9a1cca2ab1271c59b37a6493c15dc1bcd0ab9149197a9125926bc08dd26b83fb
F tool/sqlite3_rsync.c dcf03373565eab543bd43926ed7e97ade744ab0599115fde1ff8aadf04fa4974
F tool/sqltclsh.c.in 1bcc2e9da58fadf17b0bf6a50e68c1159e602ce057210b655d50bad5aaaef898
F tool/sqltclsh.tcl 862f4cf1418df5e1315b5db3b5ebe88969e2a784525af5fbf9596592f14ed848
F tool/src-verify.c d00f93263aa2fa6ba0cba0106d95458e6effb94fdb5fc634f56834f90c05bbb4
@@ -2230,8 +2221,8 @@ F tool/version-info.c 3b36468a90faf1bbd59c65fd0eb66522d9f941eedd364fabccd7227350
F tool/warnings-clang.sh bbf6a1e685e534c92ec2bfba5b1745f34fb6f0bc2a362850723a9ee87c1b31a7
F tool/warnings.sh 49a486c5069de041aedcbde4de178293e0463ae9918ecad7539eedf0ec77a139
F tool/win/sqlite.vsix deb315d026cc8400325c5863eef847784a219a2f
P 80e6ddd560b3041fe9164b940d684eeb6f28560a6c48b23ff49095da52e85df8 ea9acb5573f4d71a314e4467d30477a1d01c8db648985750a42b3c047f404c9c
R e8277f87bf1c61b9f0b07d9f21635d54
P c68d0d353082a5810a48f01637d642bcdfd9f03dd244e0618fc0d7cf5f7b9b12 6eb2939a6093c0796910645172d80c53055559dd57c012f1dc815d89fbf84447
R 71f6d3dc7cf4b3e239f1610c654bf049
U drh
Z e8ead4bda11deb101225583febb05da9
Z ae0c7f7b4f8ebc4d74a219746691a228
# Remove this line to create a well-formed Fossil manifest.

View File

@@ -1 +1 @@
c68d0d353082a5810a48f01637d642bcdfd9f03dd244e0618fc0d7cf5f7b9b12
b17f5beab892028fa3e246858f7b34e9104ffb4edd970804e31e5c71bcd5bbc5

View File

@@ -1071,7 +1071,7 @@ int sqlite3TableColumnToIndex(Index *pIdx, int iCol){
int i;
i16 iCol16;
assert( iCol>=(-1) && iCol<=SQLITE_MAX_COLUMN );
assert( pIdx->nColumn<=SQLITE_MAX_COLUMN );
assert( pIdx->nColumn<=SQLITE_MAX_COLUMN+1 );
iCol16 = iCol;
for(i=0; i<pIdx->nColumn; i++){
if( iCol16==pIdx->aiColumn[i] ){

View File

@@ -3626,11 +3626,12 @@ void sqlite3CodeRhsOfIN(
sqlite3SelectDelete(pParse->db, pCopy);
sqlite3DbFree(pParse->db, dest.zAffSdst);
if( addrBloom ){
/* Remember that location of the Bloom filter in the P3 operand
** of the OP_Once that began this subroutine. tag-202407032019 */
sqlite3VdbeGetOp(v, addrOnce)->p3 = dest.iSDParm2;
if( dest.iSDParm2==0 ){
sqlite3VdbeChangeToNoop(v, addrBloom);
}else{
sqlite3VdbeGetOp(v, addrOnce)->p3 = dest.iSDParm2;
/* If the Bloom filter won't actually be used, keep it small */
sqlite3VdbeGetOp(v, addrBloom)->p1 = 10;
}
}
if( rc ){
@@ -4077,7 +4078,7 @@ static void sqlite3ExprCodeIN(
if( ExprHasProperty(pExpr, EP_Subrtn) ){
const VdbeOp *pOp = sqlite3VdbeGetOp(v, pExpr->y.sub.iAddr);
assert( pOp->opcode==OP_Once || pParse->nErr );
if( pOp->opcode==OP_Once && pOp->p3>0 ){
if( pOp->opcode==OP_Once && pOp->p3>0 ){ /* tag-202407032019 */
assert( OptimizationEnabled(pParse->db, SQLITE_BloomFilter) );
sqlite3VdbeAddOp4Int(v, OP_Filter, pOp->p3, destIfFalse,
rLhs, nVector); VdbeCoverage(v);
@@ -5926,11 +5927,11 @@ void sqlite3ExprIfTrue(Parse *pParse, Expr *pExpr, int dest, int jumpIfNull){
assert( TK_ISNULL==OP_IsNull ); testcase( op==TK_ISNULL );
assert( TK_NOTNULL==OP_NotNull ); testcase( op==TK_NOTNULL );
r1 = sqlite3ExprCodeTemp(pParse, pExpr->pLeft, &regFree1);
sqlite3VdbeTypeofColumn(v, r1);
assert( regFree1==0 || regFree1==r1 );
if( regFree1 ) sqlite3VdbeTypeofColumn(v, r1);
sqlite3VdbeAddOp2(v, op, r1, dest);
VdbeCoverageIf(v, op==TK_ISNULL);
VdbeCoverageIf(v, op==TK_NOTNULL);
testcase( regFree1==0 );
break;
}
case TK_BETWEEN: {
@@ -6101,11 +6102,11 @@ void sqlite3ExprIfFalse(Parse *pParse, Expr *pExpr, int dest, int jumpIfNull){
case TK_ISNULL:
case TK_NOTNULL: {
r1 = sqlite3ExprCodeTemp(pParse, pExpr->pLeft, &regFree1);
sqlite3VdbeTypeofColumn(v, r1);
assert( regFree1==0 || regFree1==r1 );
if( regFree1 ) sqlite3VdbeTypeofColumn(v, r1);
sqlite3VdbeAddOp2(v, op, r1, dest);
testcase( op==TK_ISNULL ); VdbeCoverageIf(v, op==TK_ISNULL);
testcase( op==TK_NOTNULL ); VdbeCoverageIf(v, op==TK_NOTNULL);
testcase( regFree1==0 );
break;
}
case TK_BETWEEN: {

View File

@@ -398,7 +398,7 @@ struct JsonParse {
** Forward references
**************************************************************************/
static void jsonReturnStringAsBlob(JsonString*);
static int jsonFuncArgMightBeBinary(sqlite3_value *pJson);
static int jsonArgIsJsonb(sqlite3_value *pJson, JsonParse *p);
static u32 jsonTranslateBlobToText(const JsonParse*,u32,JsonString*);
static void jsonReturnParse(sqlite3_context*,JsonParse*);
static JsonParse *jsonParseFuncArg(sqlite3_context*,sqlite3_value*,u32);
@@ -816,11 +816,9 @@ static void jsonAppendSqlValue(
break;
}
default: {
if( jsonFuncArgMightBeBinary(pValue) ){
JsonParse px;
memset(&px, 0, sizeof(px));
px.aBlob = (u8*)sqlite3_value_blob(pValue);
px.nBlob = sqlite3_value_bytes(pValue);
JsonParse px;
memset(&px, 0, sizeof(px));
if( jsonArgIsJsonb(pValue, &px) ){
jsonTranslateBlobToText(&px, 0, p);
}else if( p->eErr==0 ){
sqlite3_result_error(p->pCtx, "JSON cannot hold BLOB values", -1);
@@ -2494,33 +2492,6 @@ static u32 jsonTranslateBlobToPrettyText(
return i;
}
/* Return true if the input pJson
**
** For performance reasons, this routine does not do a detailed check of the
** input BLOB to ensure that it is well-formed. Hence, false positives are
** possible. False negatives should never occur, however.
*/
static int jsonFuncArgMightBeBinary(sqlite3_value *pJson){
u32 sz, n;
const u8 *aBlob;
int nBlob;
JsonParse s;
if( sqlite3_value_type(pJson)!=SQLITE_BLOB ) return 0;
aBlob = sqlite3_value_blob(pJson);
nBlob = sqlite3_value_bytes(pJson);
if( nBlob<1 ) return 0;
if( NEVER(aBlob==0) || (aBlob[0] & 0x0f)>JSONB_OBJECT ) return 0;
memset(&s, 0, sizeof(s));
s.aBlob = (u8*)aBlob;
s.nBlob = nBlob;
n = jsonbPayloadSize(&s, 0, &sz);
if( n==0 ) return 0;
if( sz+n!=(u32)nBlob ) return 0;
if( (aBlob[0] & 0x0f)<=JSONB_FALSE && sz>0 ) return 0;
return sz+n==(u32)nBlob;
}
/*
** Given that a JSONB_ARRAY object starts at offset i, return
** the number of entries in that array.
@@ -3348,10 +3319,7 @@ static int jsonFunctionArgToBlob(
return 0;
}
case SQLITE_BLOB: {
if( jsonFuncArgMightBeBinary(pArg) ){
pParse->aBlob = (u8*)sqlite3_value_blob(pArg);
pParse->nBlob = sqlite3_value_bytes(pArg);
}else{
if( !jsonArgIsJsonb(pArg, pParse) ){
sqlite3_result_error(ctx, "JSON cannot hold BLOB values", -1);
return 1;
}
@@ -3502,27 +3470,46 @@ jsonInsertIntoBlob_patherror:
/*
** If pArg is a blob that seems like a JSONB blob, then initialize
** p to point to that JSONB and return TRUE. If pArg does not seem like
** a JSONB blob, then return FALSE;
** a JSONB blob, then return FALSE.
**
** This routine is only called if it is already known that pArg is a
** blob. The only open question is whether or not the blob appears
** to be a JSONB blob.
** For small BLOBs (having no more than 7 bytes of payload) a full
** validity check is done. So for small BLOBs this routine only returns
** true if the value is guaranteed to be a valid JSONB. For larger BLOBs
** (8 byte or more of payload) only the size of the outermost element is
** checked to verify that the BLOB is superficially valid JSONB.
**
** A full JSONB validation is done on smaller BLOBs because those BLOBs might
** also be text JSON that has been incorrectly cast into a BLOB.
** (See tag-20240123-a and https://sqlite.org/forum/forumpost/012136abd5)
** If the BLOB is 9 bytes are larger, then it is not possible for the
** superficial size check done here to pass if the input is really text
** JSON so we do not need to look deeper in that case.
**
** Why we only need to do full JSONB validation for smaller BLOBs:
**
** The first byte of valid JSON text must be one of: '{', '[', '"', ' ', '\n',
** '\r', '\t', '-', or a digit '0' through '9'. Of these, only a subset
** can also be the first byte of JSONB: '{', '[', and digits '3'
** through '9'. In every one of those cases, the payload size is 7 bytes
** or less. So if we do full JSONB validation for every BLOB where the
** payload is less than 7 bytes, we will never get a false positive for
** JSONB on an input that is really text JSON.
*/
static int jsonArgIsJsonb(sqlite3_value *pArg, JsonParse *p){
u32 n, sz = 0;
u8 c;
if( sqlite3_value_type(pArg)!=SQLITE_BLOB ) return 0;
p->aBlob = (u8*)sqlite3_value_blob(pArg);
p->nBlob = (u32)sqlite3_value_bytes(pArg);
if( p->nBlob==0 ){
p->aBlob = 0;
return 0;
}
if( NEVER(p->aBlob==0) ){
return 0;
}
if( (p->aBlob[0] & 0x0f)<=JSONB_OBJECT
if( p->nBlob>0
&& ALWAYS(p->aBlob!=0)
&& ((c = p->aBlob[0]) & 0x0f)<=JSONB_OBJECT
&& (n = jsonbPayloadSize(p, 0, &sz))>0
&& sz+n==p->nBlob
&& ((p->aBlob[0] & 0x0f)>JSONB_FALSE || sz==0)
&& ((c & 0x0f)>JSONB_FALSE || sz==0)
&& (sz>7
|| (c!=0x7b && c!=0x5b && !sqlite3Isdigit(c))
|| jsonbValidityCheck(p, 0, p->nBlob, 1)==0)
){
return 1;
}
@@ -4615,21 +4602,17 @@ static void jsonValidFunc(
return;
}
case SQLITE_BLOB: {
if( jsonFuncArgMightBeBinary(argv[0]) ){
JsonParse py;
memset(&py, 0, sizeof(py));
if( jsonArgIsJsonb(argv[0], &py) ){
if( flags & 0x04 ){
/* Superficial checking only - accomplished by the
** jsonFuncArgMightBeBinary() call above. */
** jsonArgIsJsonb() call above. */
res = 1;
}else if( flags & 0x08 ){
/* Strict checking. Check by translating BLOB->TEXT->BLOB. If
** no errors occur, call that a "strict check". */
JsonParse px;
u32 iErr;
memset(&px, 0, sizeof(px));
px.aBlob = (u8*)sqlite3_value_blob(argv[0]);
px.nBlob = sqlite3_value_bytes(argv[0]);
iErr = jsonbValidityCheck(&px, 0, px.nBlob, 1);
res = iErr==0;
res = 0==jsonbValidityCheck(&py, 0, py.nBlob, 1);
}
break;
}
@@ -4687,9 +4670,7 @@ static void jsonErrorFunc(
UNUSED_PARAMETER(argc);
memset(&s, 0, sizeof(s));
s.db = sqlite3_context_db_handle(ctx);
if( jsonFuncArgMightBeBinary(argv[0]) ){
s.aBlob = (u8*)sqlite3_value_blob(argv[0]);
s.nBlob = sqlite3_value_bytes(argv[0]);
if( jsonArgIsJsonb(argv[0], &s) ){
iErrPos = (i64)jsonbValidityCheck(&s, 0, s.nBlob, 1);
}else{
s.zJson = (char*)sqlite3_value_text(argv[0]);
@@ -5374,9 +5355,8 @@ static int jsonEachFilter(
memset(&p->sParse, 0, sizeof(p->sParse));
p->sParse.nJPRef = 1;
p->sParse.db = p->db;
if( jsonFuncArgMightBeBinary(argv[0]) ){
p->sParse.nBlob = sqlite3_value_bytes(argv[0]);
p->sParse.aBlob = (u8*)sqlite3_value_blob(argv[0]);
if( jsonArgIsJsonb(argv[0], &p->sParse) ){
/* We have JSONB */
}else{
p->sParse.zJson = (char*)sqlite3_value_text(argv[0]);
p->sParse.nJson = sqlite3_value_bytes(argv[0]);

View File

@@ -1243,30 +1243,6 @@ static void shellDtostr(
sqlite3_result_text(pCtx, z, -1, SQLITE_TRANSIENT);
}
/*
** SQL function: shell_module_schema(X)
**
** Return a fake schema for the table-valued function or eponymous virtual
** table X.
*/
static void shellModuleSchema(
sqlite3_context *pCtx,
int nVal,
sqlite3_value **apVal
){
const char *zName;
char *zFake;
UNUSED_PARAMETER(nVal);
zName = (const char*)sqlite3_value_text(apVal[0]);
zFake = zName? shellFakeSchema(sqlite3_context_db_handle(pCtx), 0, zName) : 0;
if( zFake ){
sqlite3_result_text(pCtx, sqlite3_mprintf("/* %s */", zFake),
-1, sqlite3_free);
free(zFake);
}
}
/*
** SQL function: shell_add_schema(S,X)
**
@@ -5710,6 +5686,39 @@ static void shellUSleepFunc(
sqlite3_result_int(context, sleep);
}
/*
** SQL function: shell_module_schema(X)
**
** Return a fake schema for the table-valued function or eponymous virtual
** table X.
*/
static void shellModuleSchema(
sqlite3_context *pCtx,
int nVal,
sqlite3_value **apVal
){
const char *zName;
char *zFake;
ShellState *p = (ShellState*)sqlite3_user_data(pCtx);
FILE *pSavedLog = p->pLog;
UNUSED_PARAMETER(nVal);
zName = (const char*)sqlite3_value_text(apVal[0]);
/* Temporarily disable the ".log" when calling shellFakeSchema() because
** shellFakeSchema() might generate failures for some ephemeral virtual
** tables due to missing arguments. Example: fts4aux.
** https://sqlite.org/forum/forumpost/42fe6520b803be51 */
p->pLog = 0;
zFake = zName? shellFakeSchema(sqlite3_context_db_handle(pCtx), 0, zName) : 0;
p->pLog = pSavedLog;
if( zFake ){
sqlite3_result_text(pCtx, sqlite3_mprintf("/* %s */", zFake),
-1, sqlite3_free);
free(zFake);
}
}
/* Flags for open_db().
**
** The default behavior of open_db() is to exit(1) if the database fails to
@@ -5853,7 +5862,7 @@ static void open_db(ShellState *p, int openFlags){
shellDtostr, 0, 0);
sqlite3_create_function(p->db, "shell_add_schema", 3, SQLITE_UTF8, 0,
shellAddSchemaName, 0, 0);
sqlite3_create_function(p->db, "shell_module_schema", 1, SQLITE_UTF8, 0,
sqlite3_create_function(p->db, "shell_module_schema", 1, SQLITE_UTF8, p,
shellModuleSchema, 0, 0);
sqlite3_create_function(p->db, "shell_putsnl", 1, SQLITE_UTF8, p,
shellPutsFunc, 0, 0);

View File

@@ -2644,7 +2644,7 @@ case OP_BitNot: { /* same as TK_BITNOT, in1, out2 */
break;
}
/* Opcode: Once P1 P2 * * *
/* Opcode: Once P1 P2 P3 * *
**
** Fall through to the next instruction the first time this opcode is
** encountered on each invocation of the byte-code program. Jump to P2
@@ -2660,6 +2660,12 @@ case OP_BitNot: { /* same as TK_BITNOT, in1, out2 */
** whether or not the jump should be taken. The bitmask is necessary
** because the self-altering code trick does not work for recursive
** triggers.
**
** The P3 operand is not used directly by this opcode. However P3 is
** used by the code generator as follows: If this opcode is the start
** of a subroutine and that subroutine uses a Bloom filter, then P3 will
** be the register that holds that Bloom filter. See tag-202407032019
** in the source code for implementation details.
*/
case OP_Once: { /* jump */
u32 iAddr; /* Address of this instruction */
@@ -3705,6 +3711,7 @@ case OP_MakeRecord: {
zHdr += sqlite3PutVarint(zHdr, serial_type);
if( pRec->n ){
assert( pRec->z!=0 );
assert( pRec->z!=(const char*)sqlite3CtypeMap );
memcpy(zPayload, pRec->z, pRec->n);
zPayload += pRec->n;
}

View File

@@ -1306,6 +1306,9 @@ void sqlite3VdbeChangeP5(Vdbe *p, u16 p5){
*/
void sqlite3VdbeTypeofColumn(Vdbe *p, int iDest){
VdbeOp *pOp = sqlite3VdbeGetLastOp(p);
#ifdef SQLITE_DEBUG
while( pOp->opcode==OP_ReleaseReg ) pOp--;
#endif
if( pOp->p3==iDest && pOp->opcode==OP_Column ){
pOp->p5 |= OPFLAG_TYPEOFARG;
}

View File

@@ -224,6 +224,18 @@ do_execsql_test 5.3 {
SELECT 0 as c_0
);
} {0}
# 2025-04-30 https://sqlite.org/forum/forumpost/792a09cb3df9e69f
# A continuation of the above.
#
do_execsql_test 6.1 {
DROP TABLE IF EXISTS t1;
CREATE TABLE t1(a);
SELECT 111 IN (
SELECT 222 FROM (SELECT 333 ORDER BY 1)
UNION ALL
SELECT 444 FROM (SELECT 555 FROM t1 ORDER BY 1)
);
} 0
finish_test

View File

@@ -612,4 +612,36 @@ do_catchsql_test 12.81 {
INSERT INTO t1(a) VALUES(456);
} {1 {CHECK constraint failed: a NOT BETWEEN +a AND 999999}}
#-------------------------------------------------------------------------
reset_db
do_execsql_test 13.1.0 {
CREATE TABLE Table0 (Col0 , CHECK(Table0.Col0 NOT NULL ) ) ;
REPLACE INTO Table0 VALUES (hex(randomblob(100000)));
}
integrity_check 13.1.1
do_execsql_test 13.1.2 {
UPDATE OR REPLACE Table0 SET Col0 = Table0.Col0 ;
}
integrity_check 13.1.3
do_execsql_test 13.1.4 {
SELECT length(col0) FROM table0;
} {200000}
do_execsql_test 13.2.0 {
CREATE TABLE t2 (x , CHECK((NOT (x ISNULL) )));
REPLACE INTO t2 VALUES (hex(randomblob(100000)));
}
do_execsql_test 13.2.1 {
SELECT length(x) FROM t2
} {200000}
do_execsql_test 13.2.2 {
UPDATE OR REPLACE t2 SET x = x;
}
do_execsql_test 13.2.3 {
SELECT length(x) FROM t2
} {200000}
integrity_check 13.2.4
finish_test

View File

@@ -98,6 +98,7 @@ Usage:
--config CONFIGS Only use configs on comma-separate list CONFIGS
--dryrun Write what would have happened to testrunner.log
--explain Write summary to stdout
--fuzzdb FILENAME Additional external fuzzcheck database
--jobs NUM Run tests using NUM separate processes
--omit CONFIGS Omit configs on comma-separated list CONFIGS
--status Show the full "status" report while running
@@ -169,7 +170,6 @@ Full documentation here: https://sqlite.org/src/doc/trunk/doc/testrunner.md
proc guess_number_of_cores {} {
if {[catch {number_of_cores} ret]} {
set ret 4
if {$::tcl_platform(platform) eq "windows"} {
catch { set ret $::env(NUMBER_OF_PROCESSORS) }
} else {
@@ -532,7 +532,7 @@ proc show_status {db cls} {
(SELECT value FROM config WHERE name='start')
}]
set total 0
set totalw 0
foreach s {"" ready running done failed omit} { set S($s) 0; set W($s) 0; }
set workpending 0
$db eval {
@@ -557,7 +557,7 @@ proc show_status {db cls} {
flush stdout
}
puts [format %-79.79s "Command: \[testrunner.tcl$cmdline\]"]
puts [format %-79.79s "Summary: [elapsetime $tm], $fin/$total jobs,\
puts [format %-79.79s "Summary: [elapsetime $tm], $fin/$totalw jobs,\
$ne errors, $nt tests"]
set srcdir [file dirname [file dirname $TRG(info_script)]]
@@ -812,6 +812,9 @@ for {set ii 0} {$ii < [llength $argv]} {incr ii} {
} elseif {($n>2 && [string match "$a*" --omit]) || $a=="-c"} {
incr ii
set TRG(omitconfig) [lindex $argv $ii]
} elseif {($n>2 && [string match "$a*" --fuzzdb])} {
incr ii
set env(FUZZDB) [lindex $argv $ii]
} elseif {[string match "$a*" --stop-on-error]} {
set TRG(stopOnError) 1
} elseif {[string match "$a*" --stop-on-coredump]} {
@@ -1003,6 +1006,35 @@ proc add_job {args} {
trdb last_insert_rowid
}
# Look to see if $jobcmd matches any of the glob patterns given in
# $patternlist. Return true if there is a match. Return false
# if no match is seen.
#
# An empty patternlist matches everything
#
proc job_matches_any_pattern {patternlist jobcmd} {
set bMatch 0
if {[llength $patternlist]==0} {return 1}
foreach p $patternlist {
set p [string trim $p *]
if {[string index $p 0]=="^"} {
set p [string range $p 1 end]
} else {
set p "*$p"
}
if {[string index $p end]=="\$"} {
set p [string range $p 0 end-1]
} else {
set p "$p*"
}
if {[string match $p $jobcmd]} {
set bMatch 1
break
}
}
return $bMatch
}
# Argument $build is either an empty string, or else a list of length 3
@@ -1016,6 +1048,7 @@ proc add_job {args} {
#
proc add_tcl_jobs {build config patternlist {shelldepid ""}} {
global TRG
set ntcljob 0
set topdir [file dirname $::testdir]
set testrunner_tcl [file normalize [info script]]
@@ -1033,26 +1066,8 @@ proc add_tcl_jobs {build config patternlist {shelldepid ""}} {
# The ::testspec array is populated by permutations.test
foreach f [dict get $::testspec($config) -files] {
if {[llength $patternlist]>0} {
set bMatch 0
foreach p $patternlist {
set p [string trim $p *]
if {[string index $p 0]=="^"} {
set p [string range $p 1 end]
} else {
set p "*$p"
}
if {[string index $p end]=="\$"} {
set p [string range $p 0 end-1]
} else {
set p "$p*"
}
if {[string match $p "$config [file tail $f]"]} {
set bMatch 1
break
}
}
if {$bMatch==0} continue
if {![job_matches_any_pattern $patternlist "$config [file tail $f]"]} {
continue
}
if {[file pathtype $f]!="absolute"} { set f [file join $::testdir $f] }
@@ -1077,6 +1092,7 @@ proc add_tcl_jobs {build config patternlist {shelldepid ""}} {
set depid [lindex $build 0]
if {$shelldepid!="" && [lsearch $lProp shell]>=0} { set depid $shelldepid }
incr ntcljob
add_job \
-displaytype tcl \
-displayname $displayname \
@@ -1084,6 +1100,10 @@ proc add_tcl_jobs {build config patternlist {shelldepid ""}} {
-depid $depid \
-priority $priority
}
if {$ntcljob==0 && [llength $build]>0} {
set bldid [lindex $build 0]
trdb eval {DELETE FROM jobs WHERE rowid=$bldid}
}
}
proc add_build_job {buildname target {postcmd ""} {depid ""}} {
@@ -1146,26 +1166,57 @@ proc add_make_job {bld target} {
-priority 1
}
proc add_fuzztest_jobs {buildname} {
proc add_fuzztest_jobs {buildname patternlist} {
global env TRG
# puts buildname=$buildname
foreach {interpreter scripts} [trd_fuzztest_data] {
foreach {interpreter scripts} [trd_fuzztest_data $buildname] {
set bldDone 0
set subcmd [lrange $interpreter 1 end]
set interpreter [lindex $interpreter 0]
set bld [add_build_job $buildname $interpreter]
foreach {depid dirname displayname} $bld {}
if {[string match fuzzcheck* $interpreter]
&& [info exists env(FUZZDB)]
&& [file readable $env(FUZZDB)]
&& $buildname ne "Windows-Win32Heap"
&& $buildname ne "Windows-Memdebug"
} {
set TRG(FUZZDB) $env(FUZZDB)
set fname [file normalize $env(FUZZDB)]
set N [expr {([file size $fname]+4999999)/5000000}]
for {set i 0} {$i<$N} {incr i} {
lappend scripts [list --slice $i $N $fname]
}
}
foreach s $scripts {
# Fuzz data files fuzzdata1.db and fuzzdata2.db are larger than
# the others. So ensure that these are run as a higher priority.
set tail [file tail $s]
if {$tail=="fuzzdata1.db" || $tail=="fuzzdata2.db"} {
if {[llength $s]==1} {
set tail [file tail $s]
} else {
set fname [lindex $s end]
set tail [lrange $s 0 end-1]
lappend tail [file tail $fname]
}
if {![job_matches_any_pattern $patternlist "$interpreter $tail"]} {
continue
}
if {!$bldDone} {
set bld [add_build_job $buildname $interpreter]
foreach {depid dirname displayname} $bld {}
set bldDone 1
}
if {[string match ?-slice* $tail]} {
set priority 15
} elseif {$tail=="fuzzdata1.db"
|| $tail=="fuzzdata2.db"
|| $tail=="fuzzdata8.db"} {
set priority 5
} else {
set priority 1
}
add_job \
-displaytype fuzz \
-displayname "$buildname $interpreter $tail" \
@@ -1201,9 +1252,7 @@ proc add_devtest_jobs {lBld patternlist} {
foreach b $lBld {
set bld [add_build_job $b $TRG(testfixture)]
add_tcl_jobs $bld veryquick $patternlist SHELL
if {$patternlist==""} {
add_fuzztest_jobs $b
}
add_fuzztest_jobs $b $patternlist
if {[trdb one "SELECT EXISTS (SELECT 1 FROM jobs WHERE depid='SHELL')"]} {
set sbld [add_shell_build_job $b [lindex $bld 1] [lindex $bld 0]]
@@ -1277,13 +1326,11 @@ proc add_jobs_from_cmdline {patternlist} {
add_tcl_jobs $bld $c $patternlist SHELL
}
if {$patternlist==""} {
foreach e [trd_extras $TRG(platform) $b] {
if {$e=="fuzztest"} {
add_fuzztest_jobs $b
} else {
add_make_job $bld $e
}
foreach e [trd_extras $TRG(platform) $b] {
if {$e=="fuzztest"} {
add_fuzztest_jobs $b $patternlist
} elseif {[job_matches_any_pattern $patternlist $e]} {
add_make_job $bld $e
}
}
@@ -1603,6 +1650,9 @@ proc run_testset {} {
puts "\nTest database is $TRG(dbname)"
puts "Test log is $TRG(logname)"
if {[info exists TRG(FUZZDB)]} {
puts "Extra fuzzcheck data taken from $TRG(FUZZDB)"
}
trdb eval {
SELECT sum(ntest) AS totaltest,
sum(nerr) AS totalerr
@@ -1655,7 +1705,13 @@ proc explain_layer {indent depid} {
puts "${indent}$displayname in $dirname"
explain_layer "${indent} " $jobid
} elseif {$showtests} {
set tail [lindex $displayname end]
if {[lindex $displayname end-3] eq "--slice"} {
set M [lindex $displayname end-2]
set N [lindex $displayname end-1]
set tail "[lindex $displayname end] (slice $M/$N)"
} else {
set tail [lindex $displayname end]
}
set e1 [lindex $displayname 1]
if {[string match config=* $e1]} {
set cfg [string range $e1 7 end]

View File

@@ -430,7 +430,7 @@ proc trd_extras {platform bld} {
# Usage:
#
# trd_fuzztest_data
# trd_fuzztest_data $buildname
#
# This returns data used by testrunner.tcl to run commands equivalent
# to [make fuzztest]. The returned value is a list, which should be
@@ -450,16 +450,24 @@ proc trd_extras {platform bld} {
# directory containing this file). "fuzzcheck" and "sessionfuzz" have .exe
# extensions on windows.
#
proc trd_fuzztest_data {} {
proc trd_fuzztest_data {buildname} {
set EXE ""
set lFuzzDb [glob [file join $::testdir fuzzdata*.db]]
set lSessionDb [glob [file join $::testdir sessionfuzz-data*.db]]
set sanBuilds {All-Debug Apple Have-Not Update-Delete-Limit}
if {$::tcl_platform(platform) eq "windows"} {
return [list fuzzcheck.exe $lFuzzDb]
} elseif {[lsearch $sanBuilds $buildname]>=0} {
return [list [trd_get_bin_name fuzzcheck] $lFuzzDb \
[trd_get_bin_name fuzzcheck-asan] $lFuzzDb \
[trd_get_bin_name fuzzcheck-ubsan] $lFuzzDb \
{sessionfuzz run} $lSessionDb]
} else {
return [list [trd_get_bin_name fuzzcheck] $lFuzzDb \
{sessionfuzz run} $lSessionDb]
}
return [list [trd_get_bin_name fuzzcheck] $lFuzzDb {sessionfuzz run} $lSessionDb]
}

View File

@@ -1,35 +0,0 @@
#!/bin/bash
#
# A script for running speed tests using kvtest.
#
# The test database must be set up first. Recommended
# command-line:
#
# ./kvtest init kvtest.db --count 100K --size 12K --variance 5K
if test "$1" = ""
then
echo "Usage: $0 OUTPUTFILE [OPTIONS]"
exit
fi
NAME=$1
shift
OPTS="-DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION -DSQLITE_DIRECT_OVERFLOW_READ -DUSE_PREAD"
KVARGS="--count 100K --stats"
gcc -g -Os -I. $OPTS $* kvtest.c sqlite3.c -o kvtest
# First run using SQL
rm cachegrind.out.[1-9][0-9]*
valgrind --tool=cachegrind ./kvtest run kvtest.db $KVARGS 2>&1 | tee summary-kvtest-$NAME.txt
mv cachegrind.out.[1-9][0-9]* cachegrind.out.sql-$NAME
cg_anno.tcl cachegrind.out.sql-$NAME >cout-kvtest-sql-$NAME.txt
# Second run using the sqlite3_blob object
valgrind --tool=cachegrind ./kvtest run kvtest.db $KVARGS --blob-api 2>&1 | tee -a summary-kvtest-$NAME.txt
mv cachegrind.out.[1-9][0-9]* cachegrind.out.$NAME
cg_anno.tcl cachegrind.out.$NAME >cout-kvtest-$NAME.txt
# Diff the sqlite3_blob API analysis for non-trunk runs.
if test "$NAME" != "trunk"; then
fossil test-diff --tk cout-kvtest-trunk.txt cout-kvtest-$NAME.txt &
fi

View File

@@ -1,237 +0,0 @@
# 2008 October 9
#
# The author disclaims copyright to this source code. In place of
# a legal notice, here is a blessing:
#
# May you do good and not evil.
# May you find forgiveness for yourself and forgive others.
# May you share freely, never taking more than you give.
#
#*************************************************************************
# This file generates SQL text used for performance testing.
#
# $Id: mkspeedsql.tcl,v 1.1 2008/10/09 17:57:34 drh Exp $
#
# Set a uniform random seed
expr srand(0)
# The number_name procedure below converts its argment (an integer)
# into a string which is the English-language name for that number.
#
# Example:
#
# puts [number_name 123] -> "one hundred twenty three"
#
set ones {zero one two three four five six seven eight nine
ten eleven twelve thirteen fourteen fifteen sixteen seventeen
eighteen nineteen}
set tens {{} ten twenty thirty forty fifty sixty seventy eighty ninety}
proc number_name {n} {
if {$n>=1000} {
set txt "[number_name [expr {$n/1000}]] thousand"
set n [expr {$n%1000}]
} else {
set txt {}
}
if {$n>=100} {
append txt " [lindex $::ones [expr {$n/100}]] hundred"
set n [expr {$n%100}]
}
if {$n>=20} {
append txt " [lindex $::tens [expr {$n/10}]]"
set n [expr {$n%10}]
}
if {$n>0} {
append txt " [lindex $::ones $n]"
}
set txt [string trim $txt]
if {$txt==""} {set txt zero}
return $txt
}
# Create a database schema.
#
puts {
PRAGMA page_size=1024;
PRAGMA cache_size=8192;
PRAGMA locking_mode=EXCLUSIVE;
CREATE TABLE t1(a INTEGER, b INTEGER, c TEXT);
CREATE TABLE t2(a INTEGER, b INTEGER, c TEXT);
CREATE INDEX i2a ON t2(a);
CREATE INDEX i2b ON t2(b);
SELECT name FROM sqlite_master ORDER BY 1;
}
# 50000 INSERTs on an unindexed table
#
set t1c_list {}
puts {BEGIN;}
for {set i 1} {$i<=50000} {incr i} {
set r [expr {int(rand()*500000)}]
set x [number_name $r]
lappend t1c_list $x
puts "INSERT INTO t1 VALUES($i,$r,'$x');"
}
puts {COMMIT;}
# 50000 INSERTs on an indexed table
#
puts {BEGIN;}
for {set i 1} {$i<=50000} {incr i} {
set r [expr {int(rand()*500000)}]
puts "INSERT INTO t2 VALUES($i,$r,'[number_name $r]');"
}
puts {COMMIT;}
# 50 SELECTs on an integer comparison. There is no index so
# a full table scan is required.
#
for {set i 0} {$i<50} {incr i} {
set lwr [expr {$i*100}]
set upr [expr {($i+10)*100}]
puts "SELECT count(*), avg(b) FROM t1 WHERE b>=$lwr AND b<$upr;"
}
# 50 SELECTs on an LIKE comparison. There is no index so a full
# table scan is required.
#
for {set i 0} {$i<50} {incr i} {
puts "SELECT count(*), avg(b) FROM t1 WHERE c LIKE '%[number_name $i]%';"
}
# Create indices
#
puts {BEGIN;}
puts {
CREATE INDEX i1a ON t1(a);
CREATE INDEX i1b ON t1(b);
CREATE INDEX i1c ON t1(c);
}
puts {COMMIT;}
# 5000 SELECTs on an integer comparison where the integer is
# indexed.
#
set sql {}
for {set i 0} {$i<5000} {incr i} {
set lwr [expr {$i*100}]
set upr [expr {($i+10)*100}]
puts "SELECT count(*), avg(b) FROM t1 WHERE b>=$lwr AND b<$upr;"
}
# 100000 random SELECTs against rowid.
#
for {set i 1} {$i<=100000} {incr i} {
set id [expr {int(rand()*50000)+1}]
puts "SELECT c FROM t1 WHERE rowid=$id;"
}
# 100000 random SELECTs against a unique indexed column.
#
for {set i 1} {$i<=100000} {incr i} {
set id [expr {int(rand()*50000)+1}]
puts "SELECT c FROM t1 WHERE a=$id;"
}
# 50000 random SELECTs against an indexed column text column
#
set nt1c [llength $t1c_list]
for {set i 0} {$i<50000} {incr i} {
set r [expr {int(rand()*$nt1c)}]
set c [lindex $t1c_list $i]
puts "SELECT c FROM t1 WHERE c='$c';"
}
# Vacuum
puts {VACUUM;}
# 5000 updates of ranges where the field being compared is indexed.
#
puts {BEGIN;}
for {set i 0} {$i<5000} {incr i} {
set lwr [expr {$i*2}]
set upr [expr {($i+1)*2}]
puts "UPDATE t1 SET b=b*2 WHERE a>=$lwr AND a<$upr;"
}
puts {COMMIT;}
# 50000 single-row updates. An index is used to find the row quickly.
#
puts {BEGIN;}
for {set i 0} {$i<50000} {incr i} {
set r [expr {int(rand()*500000)}]
puts "UPDATE t1 SET b=$r WHERE a=$i;"
}
puts {COMMIT;}
# 1 big text update that touches every row in the table.
#
puts {
UPDATE t1 SET c=a;
}
# Many individual text updates. Each row in the table is
# touched through an index.
#
puts {BEGIN;}
for {set i 1} {$i<=50000} {incr i} {
set r [expr {int(rand()*500000)}]
puts "UPDATE t1 SET c='[number_name $r]' WHERE a=$i;"
}
puts {COMMIT;}
# Delete all content in a table.
#
puts {DELETE FROM t1;}
# Copy one table into another
#
puts {INSERT INTO t1 SELECT * FROM t2;}
# Delete all content in a table, one row at a time.
#
puts {DELETE FROM t1 WHERE 1;}
# Refill the table yet again
#
puts {INSERT INTO t1 SELECT * FROM t2;}
# Drop the table and recreate it without its indices.
#
puts {BEGIN;}
puts {
DROP TABLE t1;
CREATE TABLE t1(a INTEGER, b INTEGER, c TEXT);
}
puts {COMMIT;}
# Refill the table yet again. This copy should be faster because
# there are no indices to deal with.
#
puts {INSERT INTO t1 SELECT * FROM t2;}
# Select 20000 rows from the table at random.
#
puts {
SELECT rowid FROM t1 ORDER BY random() LIMIT 20000;
}
# Delete 20000 random rows from the table.
#
puts {
DELETE FROM t1 WHERE rowid IN
(SELECT rowid FROM t1 ORDER BY random() LIMIT 20000);
}
puts {SELECT count(*) FROM t1;}
# Delete 20000 more rows at random from the table.
#
puts {
DELETE FROM t1 WHERE rowid IN
(SELECT rowid FROM t1 ORDER BY random() LIMIT 20000);
}
puts {SELECT count(*) FROM t1;}

View File

@@ -1,90 +0,0 @@
#!/bin/bash
#
# This is a template for a script used for day-to-day size and
# performance monitoring of SQLite. Typical usage:
#
# sh run-speed-test.sh trunk # Baseline measurement of trunk
# sh run-speed-test.sh x1 # Measure some experimental change
# fossil test-diff --tk cout-trunk.txt cout-x1.txt # View changes
#
# There are multiple output files, all with a base name given by
# the first argument:
#
# summary-$BASE.txt # Copy of standard output
# cout-$BASE.txt # cachegrind output
# explain-$BASE.txt # EXPLAIN listings (only with --explain)
#
if test "$1" = ""
then
echo "Usage: $0 OUTPUTFILE [OPTIONS]"
exit
fi
NAME=$1
shift
CC_OPTS="-DSQLITE_ENABLE_RTREE -DSQLITE_ENABLE_MEMSYS5"
SPEEDTEST_OPTS="--shrink-memory --reprepare --heap 10000000 64"
SIZE=5
doExplain=0
while test "$1" != ""; do
case $1 in
--reprepare)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--autovacuum)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--utf16be)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--stats)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--without-rowid)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--nomemstat)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--wal)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --journal wal"
;;
--size)
shift; SIZE=$1
;;
--explain)
doExplain=1
;;
--heap)
CC_OPTS="$CC_OPTS -DSQLITE_ENABLE_MEMSYS5"
shift;
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --heap $1 64"
;;
*)
CC_OPTS="$CC_OPTS $1"
;;
esac
shift
done
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --size $SIZE"
echo "NAME = $NAME" | tee summary-$NAME.txt
echo "SPEEDTEST_OPTS = $SPEEDTEST_OPTS" | tee -a summary-$NAME.txt
echo "CC_OPTS = $CC_OPTS" | tee -a summary-$NAME.txt
rm -f cachegrind.out.* speedtest1 speedtest1.db sqlite3.o
gcc -g -Os -Wall -I. $CC_OPTS -c sqlite3.c
size sqlite3.o | tee -a summary-$NAME.txt
if test $doExplain -eq 1; then
gcc -g -Os -Wall -I. $CC_OPTS \
-DSQLITE_ENABLE_EXPLAIN_COMMENTS \
./shell.c ./sqlite3.c -o sqlite3 -ldl -lpthread
fi
SRC=./speedtest1.c
gcc -g -Os -Wall -I. $CC_OPTS $SRC ./sqlite3.o -o speedtest1 -ldl -lpthread
ls -l speedtest1 | tee -a summary-$NAME.txt
valgrind --tool=cachegrind ./speedtest1 speedtest1.db \
$SPEEDTEST_OPTS 2>&1 | tee -a summary-$NAME.txt
size sqlite3.o | tee -a summary-$NAME.txt
wc sqlite3.c
cg_anno.tcl cachegrind.out.* >cout-$NAME.txt
if test $doExplain -eq 1; then
./speedtest1 --explain $SPEEDTEST_OPTS | ./sqlite3 >explain-$NAME.txt
fi

View File

@@ -1,219 +0,0 @@
#!/bin/bash
#
# This is a template for a script used for day-to-day size and
# performance monitoring of SQLite. Typical usage:
#
# sh speed-check.sh trunk # Baseline measurement of trunk
# sh speed-check.sh x1 # Measure some experimental change
# fossil xdiff --tk cout-trunk.txt cout-x1.txt # View changes
#
# There are multiple output files, all with a base name given by
# the first argument:
#
# summary-$BASE.txt # Copy of standard output
# cout-$BASE.txt # cachegrind output
# explain-$BASE.txt # EXPLAIN listings (only with --explain)
#
if test "$1" = ""
then
echo "Usage: $0 OUTPUTFILE [OPTIONS]"
exit
fi
NAME=$1
shift
#CC_OPTS="-DSQLITE_ENABLE_RTREE -DSQLITE_ENABLE_MEMSYS5"
CC_OPTS="-DSQLITE_ENABLE_MEMSYS5"
CC=gcc
SPEEDTEST_OPTS="--shrink-memory --reprepare --stats --heap 10000000 64"
SIZE=5
LEAN_OPTS="-DSQLITE_THREADSAFE=0"
LEAN_OPTS="$LEAN_OPTS -DSQLITE_DEFAULT_MEMSTATUS=0"
LEAN_OPTS="$LEAN_OPTS -DSQLITE_DEFAULT_WAL_SYNCHRONOUS=1"
LEAN_OPTS="$LEAN_OPTS -DSQLITE_LIKE_DOESNT_MATCH_BLOBS"
LEAN_OPTS="$LEAN_OPTS -DSQLITE_MAX_EXPR_DEPTH=0"
LEAN_OPTS="$LEAN_OPTS -DSQLITE_OMIT_DECLTYPE"
LEAN_OPTS="$LEAN_OPTS -DSQLITE_OMIT_DEPRECATED"
LEAN_OPTS="$LEAN_OPTS -DSQLITE_OMIT_PROGRESS_CALLBACK"
LEAN_OPTS="$LEAN_OPTS -DSQLITE_OMIT_SHARED_CACHE"
LEAN_OPTS="$LEAN_OPTS -DSQLITE_USE_ALLOCA"
BASELINE="trunk"
doExplain=0
doCachegrind=1
doVdbeProfile=0
doWal=1
doDiff=1
while test "$1" != ""; do
case $1 in
--nodiff)
doDiff=0
;;
--reprepare)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--autovacuum)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--utf16be)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--stats)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--without-rowid)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--strict)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--nomemstat)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--multithread)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--singlethread)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--serialized)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS $1"
;;
--temp)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --temp 6"
;;
--legacy)
doWal=0
CC_OPTS="$CC_OPTS -DSPEEDTEST_OMIT_HASH"
;;
--verify)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --verify"
;;
--wal)
doWal=1
;;
--size)
shift; SIZE=$1
;;
--cachesize)
shift; SPEEDTEST_OPTS="$SPEEDTEST_OPTS --cachesize $1"
;;
--stmtcache)
shift; SPEEDTEST_OPTS="$SPEEDTEST_OPTS --stmtcache $1"
;;
--checkpoint)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --checkpoint"
;;
--explain)
doExplain=1
;;
--vdbeprofile)
rm -f vdbe_profile.out
CC_OPTS="$CC_OPTS -DVDBE_PROFILE"
doCachegrind=0
doVdbeProfile=1
;;
--lean)
CC_OPTS="$CC_OPTS $LEAN_OPTS"
;;
--clang)
CC=clang
;;
--icc)
CC=/home/drh/intel/bin/icc
;;
--gcc7)
CC=gcc-7
;;
--heap)
CC_OPTS="$CC_OPTS -DSQLITE_ENABLE_MEMSYS5"
shift;
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --heap $1 64"
;;
--lookaside)
shift;
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --lookaside $1 $2"
shift;
;;
--repeat)
CC_OPTS="$CC_OPTS -DSQLITE_ENABLE_RCACHE"
shift;
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --repeat $1"
;;
--mmap)
shift;
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --mmap $1"
;;
--rtree)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --testset rtree"
CC_OPTS="$CC_OPTS -DSQLITE_ENABLE_RTREE"
;;
--persist)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --persist"
;;
--orm)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --testset orm"
;;
--cte)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --testset cte"
;;
--fp)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --testset fp"
;;
--parsenumber)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --testset parsenumber"
;;
--stmtscanstatus)
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --stmtscanstatus"
;;
-*)
CC_OPTS="$CC_OPTS $1"
;;
*)
BASELINE=$1
;;
esac
shift
done
if test $doWal -eq 1; then
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --journal wal"
fi
SPEEDTEST_OPTS="$SPEEDTEST_OPTS --size $SIZE"
echo "NAME = $NAME" | tee summary-$NAME.txt
echo "SPEEDTEST_OPTS = $SPEEDTEST_OPTS" | tee -a summary-$NAME.txt
echo "CC_OPTS = $CC_OPTS" | tee -a summary-$NAME.txt
rm -f cachegrind.out.* speedtest1 speedtest1.db sqlite3.o
if test $doVdbeProfile -eq 1; then
rm -f vdbe_profile.out
fi
$CC -g -Os -Wall -I. $CC_OPTS -c sqlite3.c
size sqlite3.o | tee -a summary-$NAME.txt
if test $doExplain -eq 1; then
$CC -g -Os -Wall -I. $CC_OPTS \
-DSQLITE_ENABLE_EXPLAIN_COMMENTS \
./shell.c ./sqlite3.c -o sqlite3 -ldl -lpthread
fi
SRC=./speedtest1.c
$CC -g -Os -Wall -I. $CC_OPTS $SRC ./sqlite3.o -o speedtest1 -ldl -lpthread
ls -l speedtest1 | tee -a summary-$NAME.txt
if test $doCachegrind -eq 1; then
valgrind --tool=cachegrind ./speedtest1 speedtest1.db \
$SPEEDTEST_OPTS 2>&1 | tee -a summary-$NAME.txt
else
./speedtest1 speedtest1.db $SPEEDTEST_OPTS 2>&1 | tee -a summary-$NAME.txt
fi
size sqlite3.o | tee -a summary-$NAME.txt
wc sqlite3.c
if test $doCachegrind -eq 1; then
cg_anno.tcl cachegrind.out.* >cout-$NAME.txt
echo '*****************************************************' >>cout-$NAME.txt
sed 's/^[0-9=-]\{9\}/==00000==/' summary-$NAME.txt >>cout-$NAME.txt
fi
if test $doExplain -eq 1; then
./speedtest1 --explain $SPEEDTEST_OPTS | ./sqlite3 >explain-$NAME.txt
fi
if test $doVdbeProfile -eq 1; then
tclsh ../sqlite/tool/vdbe_profile.tcl >vdbeprofile-$NAME.txt
open vdbeprofile-$NAME.txt
fi
if test "$NAME" != "$BASELINE" -a $doVdbeProfile -ne 1 -a $doDiff -ne 0; then
fossil test-diff --tk -c 20 cout-$BASELINE.txt cout-$NAME.txt
fi

View File

@@ -1,275 +0,0 @@
#!/usr/bin/tclsh
#
# Run this script using TCLSH to do a speed comparison between
# various versions of SQLite and PostgreSQL and MySQL
#
# Run a test
#
set cnt 1
proc runtest {title} {
global cnt
set sqlfile test$cnt.sql
puts "<h2>Test $cnt: $title</h2>"
incr cnt
set fd [open $sqlfile r]
set sql [string trim [read $fd [file size $sqlfile]]]
close $fd
set sx [split $sql \n]
set n [llength $sx]
if {$n>8} {
set sql {}
for {set i 0} {$i<3} {incr i} {append sql [lindex $sx $i]<br>\n}
append sql "<i>... [expr {$n-6}] lines omitted</i><br>\n"
for {set i [expr {$n-3}]} {$i<$n} {incr i} {
append sql [lindex $sx $i]<br>\n
}
} else {
regsub -all \n [string trim $sql] <br> sql
}
puts "<blockquote>"
puts "$sql"
puts "</blockquote><table border=0 cellpadding=0 cellspacing=0>"
set format {<tr><td>%s</td><td align="right">&nbsp;&nbsp;&nbsp;%.3f</td></tr>}
set delay 1000
# exec sync; after $delay;
# set t [time "exec psql drh <$sqlfile" 1]
# set t [expr {[lindex $t 0]/1000000.0}]
# puts [format $format PostgreSQL: $t]
exec sync; after $delay;
set t [time "exec mysql -f drh <$sqlfile" 1]
set t [expr {[lindex $t 0]/1000000.0}]
puts [format $format MySQL: $t]
# set t [time "exec ./sqlite232 s232.db <$sqlfile" 1]
# set t [expr {[lindex $t 0]/1000000.0}]
# puts [format $format {SQLite 2.3.2:} $t]
# set t [time "exec ./sqlite-100 s100.db <$sqlfile" 1]
# set t [expr {[lindex $t 0]/1000000.0}]
# puts [format $format {SQLite 2.4 (cache=100):} $t]
exec sync; after $delay;
set t [time "exec ./sqlite248 s2k.db <$sqlfile" 1]
set t [expr {[lindex $t 0]/1000000.0}]
puts [format $format {SQLite 2.4.8:} $t]
exec sync; after $delay;
set t [time "exec ./sqlite248 sns.db <$sqlfile" 1]
set t [expr {[lindex $t 0]/1000000.0}]
puts [format $format {SQLite 2.4.8 (nosync):} $t]
exec sync; after $delay;
set t [time "exec ./sqlite2412 s2kb.db <$sqlfile" 1]
set t [expr {[lindex $t 0]/1000000.0}]
puts [format $format {SQLite 2.4.12:} $t]
exec sync; after $delay;
set t [time "exec ./sqlite2412 snsb.db <$sqlfile" 1]
set t [expr {[lindex $t 0]/1000000.0}]
puts [format $format {SQLite 2.4.12 (nosync):} $t]
# set t [time "exec ./sqlite-t1 st1.db <$sqlfile" 1]
# set t [expr {[lindex $t 0]/1000000.0}]
# puts [format $format {SQLite 2.4 (test):} $t]
puts "</table>"
}
# Initialize the environment
#
expr srand(1)
catch {exec /bin/sh -c {rm -f s*.db}}
set fd [open clear.sql w]
puts $fd {
drop table t1;
drop table t2;
}
close $fd
catch {exec psql drh <clear.sql}
catch {exec mysql drh <clear.sql}
set fd [open 2kinit.sql w]
puts $fd {
PRAGMA default_cache_size=2000;
PRAGMA default_synchronous=on;
}
close $fd
exec ./sqlite248 s2k.db <2kinit.sql
exec ./sqlite2412 s2kb.db <2kinit.sql
set fd [open nosync-init.sql w]
puts $fd {
PRAGMA default_cache_size=2000;
PRAGMA default_synchronous=off;
}
close $fd
exec ./sqlite248 sns.db <nosync-init.sql
exec ./sqlite2412 snsb.db <nosync-init.sql
set ones {zero one two three four five six seven eight nine
ten eleven twelve thirteen fourteen fifteen sixteen seventeen
eighteen nineteen}
set tens {{} ten twenty thirty forty fifty sixty seventy eighty ninety}
proc number_name {n} {
if {$n>=1000} {
set txt "[number_name [expr {$n/1000}]] thousand"
set n [expr {$n%1000}]
} else {
set txt {}
}
if {$n>=100} {
append txt " [lindex $::ones [expr {$n/100}]] hundred"
set n [expr {$n%100}]
}
if {$n>=20} {
append txt " [lindex $::tens [expr {$n/10}]]"
set n [expr {$n%10}]
}
if {$n>0} {
append txt " [lindex $::ones $n]"
}
set txt [string trim $txt]
if {$txt==""} {set txt zero}
return $txt
}
set fd [open test$cnt.sql w]
puts $fd "CREATE TABLE t1(a INTEGER, b INTEGER, c VARCHAR(100));"
for {set i 1} {$i<=1000} {incr i} {
set r [expr {int(rand()*100000)}]
puts $fd "INSERT INTO t1 VALUES($i,$r,'[number_name $r]');"
}
close $fd
runtest {1000 INSERTs}
set fd [open test$cnt.sql w]
puts $fd "BEGIN;"
puts $fd "CREATE TABLE t2(a INTEGER, b INTEGER, c VARCHAR(100));"
for {set i 1} {$i<=25000} {incr i} {
set r [expr {int(rand()*500000)}]
puts $fd "INSERT INTO t2 VALUES($i,$r,'[number_name $r]');"
}
puts $fd "COMMIT;"
close $fd
runtest {25000 INSERTs in a transaction}
set fd [open test$cnt.sql w]
for {set i 0} {$i<100} {incr i} {
set lwr [expr {$i*100}]
set upr [expr {($i+10)*100}]
puts $fd "SELECT count(*), avg(b) FROM t2 WHERE b>=$lwr AND b<$upr;"
}
close $fd
runtest {100 SELECTs without an index}
set fd [open test$cnt.sql w]
for {set i 1} {$i<=100} {incr i} {
puts $fd "SELECT count(*), avg(b) FROM t2 WHERE c LIKE '%[number_name $i]%';"
}
close $fd
runtest {100 SELECTs on a string comparison}
set fd [open test$cnt.sql w]
puts $fd {CREATE INDEX i2a ON t2(a);}
puts $fd {CREATE INDEX i2b ON t2(b);}
close $fd
runtest {Creating an index}
set fd [open test$cnt.sql w]
for {set i 0} {$i<5000} {incr i} {
set lwr [expr {$i*100}]
set upr [expr {($i+1)*100}]
puts $fd "SELECT count(*), avg(b) FROM t2 WHERE b>=$lwr AND b<$upr;"
}
close $fd
runtest {5000 SELECTs with an index}
set fd [open test$cnt.sql w]
puts $fd "BEGIN;"
for {set i 0} {$i<1000} {incr i} {
set lwr [expr {$i*10}]
set upr [expr {($i+1)*10}]
puts $fd "UPDATE t1 SET b=b*2 WHERE a>=$lwr AND a<$upr;"
}
puts $fd "COMMIT;"
close $fd
runtest {1000 UPDATEs without an index}
set fd [open test$cnt.sql w]
puts $fd "BEGIN;"
for {set i 1} {$i<=25000} {incr i} {
set r [expr {int(rand()*500000)}]
puts $fd "UPDATE t2 SET b=$r WHERE a=$i;"
}
puts $fd "COMMIT;"
close $fd
runtest {25000 UPDATEs with an index}
set fd [open test$cnt.sql w]
puts $fd "BEGIN;"
for {set i 1} {$i<=25000} {incr i} {
set r [expr {int(rand()*500000)}]
puts $fd "UPDATE t2 SET c='[number_name $r]' WHERE a=$i;"
}
puts $fd "COMMIT;"
close $fd
runtest {25000 text UPDATEs with an index}
set fd [open test$cnt.sql w]
puts $fd "BEGIN;"
puts $fd "INSERT INTO t1 SELECT * FROM t2;"
puts $fd "INSERT INTO t2 SELECT * FROM t1;"
puts $fd "COMMIT;"
close $fd
runtest {INSERTs from a SELECT}
set fd [open test$cnt.sql w]
puts $fd {DELETE FROM t2 WHERE c LIKE '%fifty%';}
close $fd
runtest {DELETE without an index}
set fd [open test$cnt.sql w]
puts $fd {DELETE FROM t2 WHERE a>10 AND a<20000;}
close $fd
runtest {DELETE with an index}
set fd [open test$cnt.sql w]
puts $fd {INSERT INTO t2 SELECT * FROM t1;}
close $fd
runtest {A big INSERT after a big DELETE}
set fd [open test$cnt.sql w]
puts $fd {BEGIN;}
puts $fd {DELETE FROM t1;}
for {set i 1} {$i<=3000} {incr i} {
set r [expr {int(rand()*100000)}]
puts $fd "INSERT INTO t1 VALUES($i,$r,'[number_name $r]');"
}
puts $fd {COMMIT;}
close $fd
runtest {A big DELETE followed by many small INSERTs}
set fd [open test$cnt.sql w]
puts $fd {DROP TABLE t1;}
puts $fd {DROP TABLE t2;}
close $fd
runtest {DROP TABLE}

View File

@@ -1,171 +0,0 @@
/*
** Performance test for SQLite.
**
** This program reads ASCII text from a file named on the command-line.
** It converts each SQL statement into UTF16 and submits it to SQLite
** for evaluation. A new UTF16 database is created at the beginning of
** the program. All statements are timed using the high-resolution timer
** built into Intel-class processors.
**
** To compile this program, first compile the SQLite library separately
** will full optimizations. For example:
**
** gcc -c -O6 -DSQLITE_THREADSAFE=0 sqlite3.c
**
** Then link against this program. But to do optimize this program
** because that defeats the hi-res timer.
**
** gcc speedtest16.c sqlite3.o -ldl -I../src
**
** Then run this program with a single argument which is the name of
** a file containing SQL script that you want to test:
**
** ./a.out database.db test.sql
*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <ctype.h>
#include <unistd.h>
#include "sqlite3.h"
#define ISSPACE(X) isspace((unsigned char)(X))
/*
** hwtime.h contains inline assembler code for implementing
** high-performance timing routines.
*/
#include "hwtime.h"
/*
** Convert a zero-terminated ASCII string into a zero-terminated
** UTF-16le string. Memory to hold the returned string comes
** from malloc() and should be freed by the caller.
*/
static void *asciiToUtf16le(const char *z){
int n = strlen(z);
char *z16;
int i, j;
z16 = malloc( n*2 + 2 );
for(i=j=0; i<=n; i++){
z16[j++] = z[i];
z16[j++] = 0;
}
return (void*)z16;
}
/*
** Timers
*/
static sqlite_uint64 prepTime = 0;
static sqlite_uint64 runTime = 0;
static sqlite_uint64 finalizeTime = 0;
/*
** Compile and execute one SQL statement, converting it to UTF-16le
** first, and accumulate the cycle counts of each phase into the
** global prepTime/runTime/finalizeTime timers.
*/
static void prepareAndRun(sqlite3 *db, const char *zSql){
  void *pUtf16;           /* UTF-16le copy of zSql */
  sqlite3_stmt *pStmt;    /* Compiled statement */
  const void *pzTail;     /* Unused tail pointer from prepare */
  sqlite_uint64 t0, dt;   /* Timer start and elapsed cycles */
  int rc;                 /* API return code */

  printf("****************************************************************\n");
  printf("SQL statement: [%s]\n", zSql);
  pUtf16 = asciiToUtf16le(zSql);

  /* Phase 1: compilation */
  t0 = sqlite3Hwtime();
  rc = sqlite3_prepare16_v2(db, pUtf16, -1, &pStmt, &pzTail);
  dt = sqlite3Hwtime() - t0;
  prepTime += dt;
  printf("sqlite3_prepare16_v2() returns %d in %llu cycles\n", rc, dt);

  if( rc==SQLITE_OK ){
    int nRow = 0;

    /* Phase 2: evaluation -- step to completion, counting result rows */
    t0 = sqlite3Hwtime();
    while( (rc = sqlite3_step(pStmt))==SQLITE_ROW ) nRow++;
    dt = sqlite3Hwtime() - t0;
    runTime += dt;
    printf("sqlite3_step() returns %d after %d rows in %llu cycles\n",
           rc, nRow, dt);

    /* Phase 3: statement destruction */
    t0 = sqlite3Hwtime();
    rc = sqlite3_finalize(pStmt);
    dt = sqlite3Hwtime() - t0;
    finalizeTime += dt;
    printf("sqlite3_finalize() returns %d in %llu cycles\n", rc, dt);
  }
  free(pUtf16);
}
/*
** Read the SQL script named by argv[2], run it statement-by-statement
** against a fresh UTF-16 database named by argv[1], and print cycle-count
** statistics for each phase plus a final summary.
*/
int main(int argc, char **argv){
  void *utf16;                  /* UTF-16le rendering of the database name */
  sqlite3 *db;                  /* The open database connection */
  int rc;                       /* API return code */
  int nSql;                     /* Bytes of SQL script read */
  char *zSql;                   /* Text of the SQL script */
  int i, j;                     /* Loop counters over zSql[] */
  FILE *in;                     /* The SQL script file */
  sqlite_uint64 iStart, iElapse;
  sqlite_uint64 iSetup = 0;     /* Cycles spent in open + close */
  int nStmt = 0;                /* Number of statements evaluated */
  int nByte = 0;                /* Bytes of SQL text evaluated */

  if( argc!=3 ){
    fprintf(stderr, "Usage: %s FILENAME SQL-SCRIPT\n"
                    "Runs SQL-SCRIPT as UTF16 against a UTF16 database\n",
            argv[0]);
    exit(1);
  }

  /* Slurp the entire SQL script into memory. */
  in = fopen(argv[2], "r");
  if( in==0 ){
    /* Fix: fopen() was previously unchecked; a missing script file caused
    ** a crash in fseek(). */
    fprintf(stderr, "cannot open script file \"%s\"\n", argv[2]);
    exit(1);
  }
  fseek(in, 0L, SEEK_END);
  nSql = ftell(in);
  zSql = malloc( nSql+1 );
  if( zSql==0 ){
    /* Fix: malloc() was previously unchecked. */
    fprintf(stderr, "out of memory\n");
    exit(1);
  }
  fseek(in, 0L, SEEK_SET);
  nSql = fread(zSql, 1, nSql, in);
  zSql[nSql] = 0;
  fclose(in);   /* Fix: the script file was never closed */

  printf("SQLite version: %d\n", sqlite3_libversion_number());
  unlink(argv[1]);   /* Start each run with a fresh database file */
  utf16 = asciiToUtf16le(argv[1]);
  iStart = sqlite3Hwtime();
  rc = sqlite3_open16(utf16, &db);
  iElapse = sqlite3Hwtime() - iStart;
  iSetup = iElapse;
  printf("sqlite3_open16() returns %d in %llu cycles\n", rc, iElapse);
  free(utf16);

  /* Carve the script into complete SQL statements at ';' boundaries
  ** (using sqlite3_complete() to skip semicolons inside compound
  ** statements) and run each one. */
  for(i=j=0; j<nSql; j++){
    if( zSql[j]==';' ){
      int isComplete;
      char c = zSql[j+1];
      zSql[j+1] = 0;
      isComplete = sqlite3_complete(&zSql[i]);
      zSql[j+1] = c;
      if( isComplete ){
        zSql[j] = 0;
        while( i<j && ISSPACE(zSql[i]) ){ i++; }  /* skip leading whitespace */
        if( i<j ){
          nStmt++;
          nByte += j-i;
          prepareAndRun(db, &zSql[i]);
        }
        zSql[j] = ';';
        i = j+1;
      }
    }
  }
  free(zSql);   /* Fix: script buffer was never released */

  iStart = sqlite3Hwtime();
  sqlite3_close(db);
  iElapse = sqlite3Hwtime() - iStart;
  iSetup += iElapse;
  printf("sqlite3_close() returns in %llu cycles\n", iElapse);

  /* Summary report */
  printf("\n");
  printf("Statements run: %15d\n", nStmt);
  printf("Bytes of SQL text: %15d\n", nByte);
  printf("Total prepare time: %15llu cycles\n", prepTime);
  printf("Total run time: %15llu cycles\n", runTime);
  printf("Total finalize time: %15llu cycles\n", finalizeTime);
  printf("Open/Close time: %15llu cycles\n", iSetup);
  printf("Total Time: %15llu cycles\n",
         prepTime + runTime + finalizeTime + iSetup);
  return 0;
}

View File

@@ -1,207 +0,0 @@
#!/usr/bin/tclsh
#
# Run this script using TCLSH to do a speed comparison between
# various versions of SQLite and PostgreSQL and MySQL
#
# Run a test
#
# Global test counter: names the per-test SQL files (test$cnt.sql) and
# numbers the report sections; incremented by runtest for each test run.
set cnt 1

# runtest TITLE
#
# Emit one timed test as an HTML report fragment.  The SQL for the test
# must already be in test$cnt.sql (written by the caller).  The script
# is echoed (abbreviated when long), then timed against each contender
# (PostgreSQL, MySQL, and several SQLite builds), one table row each.
proc runtest {title} {
  global cnt
  set sqlfile test$cnt.sql
  puts "<h2>Test $cnt: $title</h2>"
  incr cnt
  # Read the SQL back so it can be shown in the report.
  set fd [open $sqlfile r]
  set sql [string trim [read $fd [file size $sqlfile]]]
  close $fd
  # Long scripts are abbreviated to their first and last three lines.
  set sx [split $sql \n]
  set n [llength $sx]
  if {$n>8} {
    set sql {}
    for {set i 0} {$i<3} {incr i} {append sql [lindex $sx $i]<br>\n}
    append sql "<i>... [expr {$n-6}] lines omitted</i><br>\n"
    for {set i [expr {$n-3}]} {$i<$n} {incr i} {
      append sql [lindex $sx $i]<br>\n
    }
  } else {
    regsub -all \n [string trim $sql] <br> sql
  }
  puts "<blockquote>"
  puts "$sql"
  puts "</blockquote><table border=0 cellpadding=0 cellspacing=0>"
  set format {<tr><td>%s</td><td align="right">&nbsp;&nbsp;&nbsp;%.3f</td></tr>}
  # Flush the filesystem and pause before each run so every contender
  # starts from a similarly quiescent system.
  set delay 1000
  exec sync; after $delay;
  set t [time "exec psql drh <$sqlfile" 1]
  set t [expr {[lindex $t 0]/1000000.0}]
  puts [format $format PostgreSQL: $t]
  exec sync; after $delay;
  set t [time "exec mysql -f drh <$sqlfile" 1]
  set t [expr {[lindex $t 0]/1000000.0}]
  puts [format $format MySQL: $t]
  # set t [time "exec ./sqlite232 s232.db <$sqlfile" 1]
  # set t [expr {[lindex $t 0]/1000000.0}]
  # puts [format $format {SQLite 2.3.2:} $t]
  # set t [time "exec ./sqlite-100 s100.db <$sqlfile" 1]
  # set t [expr {[lindex $t 0]/1000000.0}]
  # puts [format $format {SQLite 2.4 (cache=100):} $t]
  exec sync; after $delay;
  set t [time "exec ./sqlite240 s2k.db <$sqlfile" 1]
  set t [expr {[lindex $t 0]/1000000.0}]
  puts [format $format {SQLite 2.4:} $t]
  exec sync; after $delay;
  set t [time "exec ./sqlite240 sns.db <$sqlfile" 1]
  set t [expr {[lindex $t 0]/1000000.0}]
  puts [format $format {SQLite 2.4 (nosync):} $t]
  # set t [time "exec ./sqlite-t1 st1.db <$sqlfile" 1]
  # set t [expr {[lindex $t 0]/1000000.0}]
  # puts [format $format {SQLite 2.4 (test):} $t]
  puts "</table>"
}
# Initialize the environment
#
# Seed the RNG deterministically so each run generates identical SQL,
# remove any SQLite database files left over from a previous run, and
# clear the t1/t2 tables from the PostgreSQL and MySQL test databases.
expr srand(1)
catch {exec /bin/sh -c {rm -f s*.db}}
set fd [open clear.sql w]
puts $fd {
drop table t1;
drop table t2;
}
close $fd
catch {exec psql drh <clear.sql}
catch {exec mysql drh <clear.sql}
# Create the SQLite databases: s2k.db and st1.db with a 2000-page cache
# and synchronous writes enabled ...
set fd [open 2kinit.sql w]
puts $fd {
PRAGMA default_cache_size=2000;
PRAGMA default_synchronous=on;
}
close $fd
exec ./sqlite240 s2k.db <2kinit.sql
exec ./sqlite-t1 st1.db <2kinit.sql
# ... and sns.db with synchronous writes disabled.
set fd [open nosync-init.sql w]
puts $fd {
PRAGMA default_cache_size=2000;
PRAGMA default_synchronous=off;
}
close $fd
exec ./sqlite240 sns.db <nosync-init.sql
# Word lists used by number_name to spell out integers in English.
set ones {zero one two three four five six seven eight nine
ten eleven twelve thirteen fourteen fifteen sixteen seventeen
eighteen nineteen}
set tens {{} ten twenty thirty forty fifty sixty seventy eighty ninety}
# number_name N
#
# Return the English-language spelling of the non-negative integer N,
# e.g. 123 -> "one hundred twenty three".  Relies on the global $::ones
# and $::tens word lists.  Returns "zero" for N == 0.
proc number_name {n} {
  set txt {}
  if {$n>=1000} {
    # Spell the thousands recursively, then continue with the remainder.
    append txt "[number_name [expr {$n/1000}]] thousand"
    set n [expr {$n%1000}]
  }
  if {$n>=100} {
    append txt " [lindex $::ones [expr {$n/100}]] hundred"
    set n [expr {$n%100}]
  }
  if {$n>=20} {
    append txt " [lindex $::tens [expr {$n/10}]]"
    set n [expr {$n%10}]
  }
  if {$n>0} {
    append txt " [lindex $::ones $n]"
  }
  set txt [string trim $txt]
  if {$txt eq ""} {
    set txt zero
  }
  return $txt
}
# Main test sequence.  The original script repeated the same
# "25000 INSERTs in a transaction" / "DELETE everything" pair five
# times verbatim; the copy-pasted blocks are folded into one loop.
# Only the first round includes the CREATE TABLE statement, exactly as
# before, and rand() is consumed in the same order, so the generated
# SQL files are byte-for-byte identical to the originals.
for {set round 0} {$round<5} {incr round} {
  set fd [open test$cnt.sql w]
  puts $fd "BEGIN;"
  if {$round==0} {
    puts $fd "CREATE TABLE t1(a INTEGER, b INTEGER, c VARCHAR(100));"
  }
  for {set i 1} {$i<=25000} {incr i} {
    set r [expr {int(rand()*500000)}]
    puts $fd "INSERT INTO t1 VALUES($i,$r,'[number_name $r]');"
  }
  puts $fd "COMMIT;"
  close $fd
  runtest {25000 INSERTs in a transaction}

  set fd [open test$cnt.sql w]
  puts $fd "DELETE FROM t1;"
  close $fd
  runtest {DELETE everything}
}

# Finally, time dropping the table.
set fd [open test$cnt.sql w]
puts $fd {DROP TABLE t1;}
close $fd
runtest {DROP TABLE}

View File

@@ -1,260 +0,0 @@
/*
** Performance test for SQLite.
**
** This program reads ASCII text from a file named on the command-line
** and submits that text to SQLite for evaluation. A new database
** is created at the beginning of the program. All statements are
** timed using the high-resolution timer built into Intel-class processors.
**
** To compile this program, first compile the SQLite library separately
** with full optimizations. For example:
**
** gcc -c -O6 -DSQLITE_THREADSAFE=0 sqlite3.c
**
** Then link it against this program. But do not optimize this program
** itself, because that defeats the hi-res timer.
**
** gcc speedtest8.c sqlite3.o -ldl -I../src
**
** Then run this program with a single argument which is the name of
** a file containing SQL script that you want to test:
**
** ./a.out test.db test.sql
*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <ctype.h>
#include <time.h>
#if defined(_MSC_VER)
#include <windows.h>
#else
#include <unistd.h>
#include <sys/times.h>
#include <sched.h>
#endif
#include "sqlite3.h"
/*
** hwtime.h contains inline assembler code for implementing
** high-performance timing routines.
*/
#include "hwtime.h"
/*
** Timers
**
** Cumulative processor-cycle totals (measured with sqlite3Hwtime())
** gathered by prepareAndRun() and reported in the summary printed
** at the end of main().
*/
static sqlite_uint64 prepTime = 0;      /* Total cycles in sqlite3_prepare_v2() */
static sqlite_uint64 runTime = 0;       /* Total cycles in the sqlite3_step() loops */
static sqlite_uint64 finalizeTime = 0;  /* Total cycles in sqlite3_finalize() */
/*
** Compile and execute one SQL statement, accumulating the cycle count
** of each phase into the global timers.  Unless bQuiet is set, print
** the statement text and the per-phase counts as we go.
*/
static void prepareAndRun(sqlite3 *db, const char *zSql, int bQuiet){
  sqlite3_stmt *pStmt;    /* Compiled statement */
  const char *zTail;      /* Unused tail pointer from prepare */
  sqlite_uint64 t0, dt;   /* Timer start and elapsed cycles */
  int rc;                 /* API return code */

  if (!bQuiet){
    printf("***************************************************************\n");
  }
  if (!bQuiet) printf("SQL statement: [%s]\n", zSql);

  /* Phase 1: compilation */
  t0 = sqlite3Hwtime();
  rc = sqlite3_prepare_v2(db, zSql, -1, &pStmt, &zTail);
  dt = sqlite3Hwtime() - t0;
  prepTime += dt;
  if (!bQuiet){
    printf("sqlite3_prepare_v2() returns %d in %llu cycles\n", rc, dt);
  }

  if( rc==SQLITE_OK ){
    int nRow = 0;

    /* Phase 2: evaluation -- step to completion, counting result rows */
    t0 = sqlite3Hwtime();
    while( (rc = sqlite3_step(pStmt))==SQLITE_ROW ) nRow++;
    dt = sqlite3Hwtime() - t0;
    runTime += dt;
    if (!bQuiet){
      printf("sqlite3_step() returns %d after %d rows in %llu cycles\n",
             rc, nRow, dt);
    }

    /* Phase 3: statement destruction */
    t0 = sqlite3Hwtime();
    rc = sqlite3_finalize(pStmt);
    dt = sqlite3Hwtime() - t0;
    finalizeTime += dt;
    if (!bQuiet){
      printf("sqlite3_finalize() returns %d in %llu cycles\n", rc, dt);
    }
  }
}
/*
** Parse options (-log, -priority, -quiet), read the SQL script named by
** the second remaining argument, run it against a fresh database named
** by the first, and print cycle-count and CPU-time statistics.
*/
int main(int argc, char **argv){
  sqlite3 *db;                   /* The open database connection */
  int rc;                        /* API return code */
  int nSql;                      /* Bytes of SQL script read */
  char *zSql;                    /* Text of the SQL script */
  int i, j;                      /* Loop counters over zSql[] */
  FILE *in;                      /* The SQL script file */
  sqlite_uint64 iStart, iElapse;
  sqlite_uint64 iSetup = 0;      /* Cycles spent in open + close */
  int nStmt = 0;                 /* Number of statements evaluated */
  int nByte = 0;                 /* Bytes of SQL text evaluated */
  const char *zArgv0 = argv[0];  /* Saved program name (argv shifts below) */
  int bQuiet = 0;                /* True to suppress per-statement output */
#if !defined(_MSC_VER)
  struct tms tmsStart, tmsEnd;   /* CPU-time accounting (unix only) */
  clock_t clkStart, clkEnd;      /* Wall-clock accounting (unix only) */
#endif

#ifdef HAVE_OSINST
  extern sqlite3_vfs *sqlite3_instvfs_binarylog(char *, char *, char *);
  extern void sqlite3_instvfs_destroy(sqlite3_vfs *);
  sqlite3_vfs *pVfs = 0;
#endif

  /* Consume leading options until only FILENAME and SQL-SCRIPT remain. */
  while (argc>3)
  {
#ifdef HAVE_OSINST
    if( argc>4 && (strcmp(argv[1], "-log")==0) ){
      pVfs = sqlite3_instvfs_binarylog("oslog", 0, argv[2]);
      sqlite3_vfs_register(pVfs, 1);
      argv += 2;
      argc -= 2;
      continue;
    }
#endif

    /*
    ** Increasing the priority slightly above normal can help with
    ** repeatability of testing. Note that with Cygwin, -5 equates
    ** to "High", +5 equates to "Low", and anything in between
    ** equates to "Normal".
    */
    if( argc>4 && (strcmp(argv[1], "-priority")==0) ){
#if defined(_MSC_VER)
      int new_priority = atoi(argv[2]);
      /* Bug fix: the first arm previously tested new_priority<=0, which
      ** made the ==0 arm unreachable and mapped priority 0 to
      ** ABOVE_NORMAL instead of NORMAL as the comment above intends. */
      if(!SetPriorityClass(GetCurrentProcess(),
          (new_priority<=-5) ? HIGH_PRIORITY_CLASS :
          (new_priority<0)   ? ABOVE_NORMAL_PRIORITY_CLASS :
          (new_priority==0)  ? NORMAL_PRIORITY_CLASS :
          (new_priority<5)   ? BELOW_NORMAL_PRIORITY_CLASS :
                               IDLE_PRIORITY_CLASS)){
        printf ("error setting priority\n");
        exit(2);
      }
#else
      struct sched_param myParam;
      sched_getparam(0, &myParam);
      printf ("Current process priority is %d.\n", (int)myParam.sched_priority);
      myParam.sched_priority = atoi(argv[2]);
      printf ("Setting process priority to %d.\n", (int)myParam.sched_priority);
      if (sched_setparam (0, &myParam) != 0){
        printf ("error setting priority\n");
        exit(2);
      }
#endif
      argv += 2;
      argc -= 2;
      continue;
    }

    if( argc>3 && strcmp(argv[1], "-quiet")==0 ){
      bQuiet = -1;
      argv++;
      argc--;
      continue;
    }

    break;
  }

  if( argc!=3 ){
    fprintf(stderr, "Usage: %s [options] FILENAME SQL-SCRIPT\n"
                    "Runs SQL-SCRIPT against a UTF8 database\n"
                    "\toptions:\n"
#ifdef HAVE_OSINST
                    "\t-log <log>\n"
#endif
                    "\t-priority <value> : set priority of task\n"
                    "\t-quiet : only display summary results\n",
            zArgv0);
    exit(1);
  }

  /* Slurp the entire SQL script into memory. */
  in = fopen(argv[2], "r");
  if( in==0 ){
    /* Fix: fopen() was previously unchecked; a missing script file
    ** caused a crash in fseek(). */
    fprintf(stderr, "cannot open script file \"%s\"\n", argv[2]);
    exit(1);
  }
  fseek(in, 0L, SEEK_END);
  nSql = ftell(in);
  zSql = malloc( nSql+1 );
  if( zSql==0 ){
    /* Fix: malloc() was previously unchecked. */
    fprintf(stderr, "out of memory\n");
    exit(1);
  }
  fseek(in, 0L, SEEK_SET);
  nSql = fread(zSql, 1, nSql, in);
  zSql[nSql] = 0;
  fclose(in);   /* Fix: the script file was never closed */

  printf("SQLite version: %d\n", sqlite3_libversion_number());
  unlink(argv[1]);   /* Start each run with a fresh database file */

#if !defined(_MSC_VER)
  clkStart = times(&tmsStart);
#endif
  iStart = sqlite3Hwtime();
  rc = sqlite3_open(argv[1], &db);
  iElapse = sqlite3Hwtime() - iStart;
  iSetup = iElapse;
  if (!bQuiet) printf("sqlite3_open() returns %d in %llu cycles\n", rc, iElapse);

  /* Carve the script into complete SQL statements at ';' boundaries
  ** and run each one.  A ".crash" directive aborts the process to
  ** simulate a crash. */
  for(i=j=0; j<nSql; j++){
    if( zSql[j]==';' ){
      int isComplete;
      char c = zSql[j+1];
      zSql[j+1] = 0;
      isComplete = sqlite3_complete(&zSql[i]);
      zSql[j+1] = c;
      if( isComplete ){
        zSql[j] = 0;
        while( i<j && isspace(zSql[i]) ){ i++; }
        if( i<j ){
          int n = j - i;
          if( n>=6 && memcmp(&zSql[i], ".crash",6)==0 ) exit(1);
          nStmt++;
          nByte += n;
          prepareAndRun(db, &zSql[i], bQuiet);
        }
        zSql[j] = ';';
        i = j+1;
      }
    }
  }
  free(zSql);   /* Fix: script buffer was never released */

  iStart = sqlite3Hwtime();
  sqlite3_close(db);
  iElapse = sqlite3Hwtime() - iStart;
#if !defined(_MSC_VER)
  clkEnd = times(&tmsEnd);
#endif
  iSetup += iElapse;
  if (!bQuiet) printf("sqlite3_close() returns in %llu cycles\n", iElapse);

  /* Summary report */
  printf("\n");
  printf("Statements run: %15d stmts\n", nStmt);
  printf("Bytes of SQL text: %15d bytes\n", nByte);
  printf("Total prepare time: %15llu cycles\n", prepTime);
  printf("Total run time: %15llu cycles\n", runTime);
  printf("Total finalize time: %15llu cycles\n", finalizeTime);
  printf("Open/Close time: %15llu cycles\n", iSetup);
  printf("Total time: %15llu cycles\n",
         prepTime + runTime + finalizeTime + iSetup);

#if !defined(_MSC_VER)
  printf("\n");
  printf("Total user CPU time: %15.3g secs\n", (tmsEnd.tms_utime - tmsStart.tms_utime)/(double)CLOCKS_PER_SEC );
  printf("Total system CPU time: %15.3g secs\n", (tmsEnd.tms_stime - tmsStart.tms_stime)/(double)CLOCKS_PER_SEC );
  printf("Total real time: %15.3g secs\n", (clkEnd -clkStart)/(double)CLOCKS_PER_SEC );
#endif

#ifdef HAVE_OSINST
  if( pVfs ){
    sqlite3_instvfs_destroy(pVfs);
    /* NOTE(review): argv[0] here only names the log file because the
    ** earlier "argv += 2" shift moved -log's argument toward slot 0;
    ** with additional options it prints the wrong string -- confirm
    ** before relying on this message. */
    printf("vfs log written to %s\n", argv[0]);
  }
#endif
  return 0;
}

View File

@@ -1,218 +0,0 @@
/*
** Performance test for SQLite.
**
** This program reads ASCII text from a file named on the command-line
** and submits that text to SQLite for evaluation. A new database
** is created at the beginning of the program. All statements are
** timed using the high-resolution timer built into Intel-class processors.
**
** To compile this program, first compile the SQLite library separately
** with full optimizations. For example:
**
** gcc -c -O6 -DSQLITE_THREADSAFE=0 sqlite3.c
**
** Then link it against this program. But do not optimize this program
** itself, because that defeats the hi-res timer.
**
** gcc speedtest8.c sqlite3.o -ldl -I../src
**
** Then run this program with a single argument which is the name of
** a file containing SQL script that you want to test:
**
** ./a.out test.db test.sql
*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <ctype.h>
#include <unistd.h>
#include <stdarg.h>
#include "sqlite3.h"
#define ISSPACE(X) isspace((unsigned char)(X))
#include "test_osinst.c"
/*
** Compile and execute one SQL statement, writing a marker and a timed
** call record for each phase (prepare, step loop, finalize) to the
** instrumentation VFS binary log.
*/
static void prepareAndRun(sqlite3_vfs *pInstVfs, sqlite3 *db, const char *zSql){
  sqlite3_stmt *pStmt;      /* Compiled statement */
  const char *zTail;        /* Unused tail pointer from prepare */
  int rc;                   /* API return code */
  char zMessage[1024];      /* Buffer for log-marker text */
  sqlite3_uint64 iTime;     /* Elapsed cycles for one phase */

  zMessage[1023] = '\0';

  /* Phase 1: compilation */
  sqlite3_snprintf(1023, zMessage, "sqlite3_prepare_v2: %s", zSql);
  sqlite3_instvfs_binarylog_marker(pInstVfs, zMessage);
  iTime = sqlite3Hwtime();
  rc = sqlite3_prepare_v2(db, zSql, -1, &pStmt, &zTail);
  iTime = sqlite3Hwtime() - iTime;
  sqlite3_instvfs_binarylog_call(pInstVfs,BINARYLOG_PREPARE_V2,iTime,rc,zSql);

  if( rc==SQLITE_OK ){
    int nRow = 0;

    /* Phase 2: evaluation -- step to completion, counting result rows */
    sqlite3_snprintf(1023, zMessage, "sqlite3_step loop: %s", zSql);
    sqlite3_instvfs_binarylog_marker(pInstVfs, zMessage);
    iTime = sqlite3Hwtime();
    while( (rc = sqlite3_step(pStmt))==SQLITE_ROW ) nRow++;
    iTime = sqlite3Hwtime() - iTime;
    sqlite3_instvfs_binarylog_call(pInstVfs, BINARYLOG_STEP, iTime, rc, zSql);

    /* Phase 3: statement destruction */
    sqlite3_snprintf(1023, zMessage, "sqlite3_finalize: %s", zSql);
    sqlite3_instvfs_binarylog_marker(pInstVfs, zMessage);
    iTime = sqlite3Hwtime();
    rc = sqlite3_finalize(pStmt);
    iTime = sqlite3Hwtime() - iTime;
    sqlite3_instvfs_binarylog_call(pInstVfs, BINARYLOG_FINALIZE, iTime, rc, zSql);
  }
}
/*
** Return true (1) if zLeft and zRight are identical strings, or
** false (0) otherwise.
*/
static int stringcompare(const char *zLeft, const char *zRight){
  /* The original hand-rolled character loop is equivalent to a
  ** standard-library equality test; use the idiomatic form. */
  return strcmp(zLeft, zRight)==0;
}
/*
** Read the entire content of file zFile into memory obtained from
** malloc(), zero-terminate it, and return a pointer to it.  The number
** of bytes read (excluding the terminator) is written into *pnScript.
** Return NULL if the file cannot be opened or read, or if an
** allocation fails.
*/
static char *readScriptFile(const char *zFile, int *pnScript){
  sqlite3_vfs *pVfs = sqlite3_vfs_find(0);  /* The default VFS */
  sqlite3_file *p;                          /* Open file handle */
  int rc;                                   /* VFS return code */
  sqlite3_int64 nByte;                      /* Size of the file in bytes */
  char *zData = 0;                          /* Buffer holding file content */
  int flags = SQLITE_OPEN_READONLY|SQLITE_OPEN_MAIN_DB;

  p = (sqlite3_file *)malloc(pVfs->szOsFile);
  if( p==0 ){
    /* Fix: this malloc was previously unchecked; failure crashed xOpen */
    return 0;
  }
  rc = pVfs->xOpen(pVfs, zFile, p, flags, &flags);
  if( rc!=SQLITE_OK ){
    goto error_out;
  }
  rc = p->pMethods->xFileSize(p, &nByte);
  if( rc!=SQLITE_OK ){
    goto close_out;
  }
  zData = (char *)malloc(nByte+1);
  if( zData==0 ){
    /* Fix: this malloc was previously unchecked; failure crashed xRead */
    goto close_out;
  }
  rc = p->pMethods->xRead(p, zData, nByte, 0);
  if( rc!=SQLITE_OK ){
    goto close_out;
  }
  zData[nByte] = '\0';
  p->pMethods->xClose(p);
  free(p);
  *pnScript = nByte;
  return zData;

close_out:
  p->pMethods->xClose(p);
error_out:
  free(p);
  free(zData);
  return 0;
}
/*
** Parse the -db/-script/-log/-logdata options, open the database
** through the instrumentation VFS, run the script one statement at a
** time via prepareAndRun(), and write a binary log of every VFS call.
*/
int main(int argc, char **argv){
  const char zUsageMsg[] =
  "Usage: %s options...\n"
  " where available options are:\n"
  "\n"
  " -db DATABASE-FILE (database file to operate on)\n"
  " -script SCRIPT-FILE (script file to read sql from)\n"
  " -log LOG-FILE (log file to create)\n"
  " -logdata (log all data to log file)\n"
  "\n"
  " Options -db, -script and -log are compulsory\n"
  "\n"
  ;

  const char *zDb = 0;       /* Database file (-db) */
  const char *zScript = 0;   /* SQL script file (-script) */
  const char *zLog = 0;      /* Binary log file to create (-log) */
  int logdata = 0;           /* True to also log page data (-logdata) */
  int ii;                    /* Loop counter over argv[] */
  int i, j;                  /* Loop counters over zSql[] */
  int rc;                    /* API return code */
  sqlite3_vfs *pInstVfs;     /* Instrumentation VFS */
  char *zSql = 0;            /* Text of the SQL script */
  int nSql;                  /* Bytes of SQL script */
  sqlite3 *db;               /* The database connection */

  /* Parse command-line options. */
  for(ii=1; ii<argc; ii++){
    if( stringcompare("-db", argv[ii]) && (ii+1)<argc ){
      zDb = argv[++ii];
    }
    else if( stringcompare("-script", argv[ii]) && (ii+1)<argc ){
      zScript = argv[++ii];
    }
    else if( stringcompare("-log", argv[ii]) && (ii+1)<argc ){
      zLog = argv[++ii];
    }
    else if( stringcompare("-logdata", argv[ii]) ){
      logdata = 1;
    }
    else {
      goto usage;
    }
  }
  if( !zDb || !zScript || !zLog ) goto usage;

  zSql = readScriptFile(zScript, &nSql);
  if( !zSql ){
    fprintf(stderr, "Failed to read script file\n");
    return -1;
  }

  /* Create the logging VFS and open the database through it. */
  pInstVfs = sqlite3_instvfs_binarylog("logging", 0, zLog, logdata);
  rc = sqlite3_open_v2(
      zDb, &db, SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE, "logging"
  );
  if( rc!=SQLITE_OK ){
    fprintf(stderr, "Failed to open db: %s\n", sqlite3_errmsg(db));
    sqlite3_close(db);   /* Fix: release the handle allocated on failure */
    free(zSql);
    return -2;
  }

  /* Carve the script into complete SQL statements at ';' boundaries
  ** and run each one. */
  for(i=j=0; j<nSql; j++){
    if( zSql[j]==';' ){
      int isComplete;
      char c = zSql[j+1];
      zSql[j+1] = 0;
      isComplete = sqlite3_complete(&zSql[i]);
      zSql[j+1] = c;
      if( isComplete ){
        zSql[j] = 0;
        while( i<j && ISSPACE(zSql[i]) ){ i++; }
        if( i<j ){
          prepareAndRun(pInstVfs, db, &zSql[i]);
        }
        zSql[j] = ';';
        i = j+1;
      }
    }
  }

  /* Fix: the database was never closed, leaking the connection and its
  ** open files; close it before tearing down the VFS it depends on. */
  sqlite3_close(db);
  free(zSql);   /* Fix: script buffer was never released */
  sqlite3_instvfs_destroy(pInstVfs);
  return 0;

usage:
  fprintf(stderr, zUsageMsg, argv[0]);
  return -3;
}

File diff suppressed because it is too large Load Diff