Commit 49a17a29 authored by Ajay Patel's avatar Ajay Patel

Initial commit

parents
Pipeline #36017298 canceled with stages
in 2 minutes and 55 seconds
[flake8]
ignore = E261,E402,W504
exclude = .git,__pycache__,docs/source/conf.py,old,build,dist,setup.py,supersqlite/third_party,supersqlite/third_party_mock,benchmark.py,supersqlite/req_wheels
max-line-length = 80
\ No newline at end of file
# Compiled python modules.
*.pyc
# Setuptools distribution folder.
/build/
/dist/
MANIFEST
# Python egg metadata, regenerated from source files by setuptools.
/supersqlite-*
/*.egg-info
/*.egg
/.eggs/
# Third Party Folders
/supersqlite/third_party/_apsw/src/sqlite3.h
/supersqlite/third_party/_apsw/src/sqlite3.c
# Third Party Build Folders
/supersqlite/third_party/**/build
/supersqlite/third_party/internal/**/*
/supersqlite/third_party/internal/*
!/supersqlite/third_party/internal/__init__.py
# Wheel Folder
/supersqlite/req_wheels/**/*
/supersqlite/req_wheels/*
!/supersqlite/req_wheels/.gitkeep
# Weird files

.DS_Store
**/.DS_Store
\ No newline at end of file
image: registry.gitlab.com/plasticity/ubuntu-dind:16.04
services:
- docker:dind
stages:
- Test
- Deploy to PyPI
- Deploy Linux Wheels
before_script:
# Setup package manager
- apt-get update 2>&1 >/dev/null
- apt-get install curl wget git -y
- apt-get install libblas-dev liblapack-dev libatlas-base-dev gfortran -y
- apt-get install python -y
- apt-get install python-pip -y
- apt-get install python-setuptools -y
- apt-get install python3-setuptools python3-dev -y
- apt-get install python-dev -y
- apt-get install openssh-server -y
# Setup SSH configuration
- mkdir -p ~/.ssh
- echo -e "Host *\n\tStrictHostKeyChecking no\n\n" > ~/.ssh/config
# Install gettext for envsubst
- apt-get install gettext -y
Lint:
stage: Test
script:
- apt-get install python3 -y
- apt-get install python3-pip -y
- pip3 install flake8
- pip3 install pep8-naming
- flake8 --version
- flake8
Test Python 2:
stage: Test
script:
- apt-get install python-pip -y
- pip2 install -U pip setuptools
- python2 setup.py install
- pip install gensim
- python2 -m tests.tests -- -v
Test Python 3:
stage: Test
script:
- apt-get install build-essential -y
- apt-get install libffi-dev libssl-dev zlib1g-dev libncurses5-dev libncursesw5-dev libreadline-dev libsqlite3-dev -y
- apt-get install libgdbm-dev libdb5.3-dev libbz2-dev libexpat1-dev liblzma-dev tk-dev -y
- wget https://www.python.org/ftp/python/3.7.1/Python-3.7.1.tar.xz
- tar xf Python-3.7.1.tar.xz
- cd Python-3.7.1 && ./configure --enable-shared && make -j 8 && make altinstall && cd ../
- echo $(pwd)/Python-3.7.1
- ls $(pwd)/Python-3.7.1
- export LD_LIBRARY_PATH=$(pwd)/Python-3.7.1:$LD_LIBRARY_PATH
- echo $LD_LIBRARY_PATH
- curl https://bootstrap.pypa.io/get-pip.py | python3.7
- python3.7 -m pip install -U wheel setuptools # TEMP: removing upgrading of pip here, because the newest one is broken for Python 3
- python3.7 setup.py install
- python3.7 -m pip install gensim
- python3.7 -m tests.tests -- -v
Deploy to PyPI:
stage: Deploy to PyPI
script:
# Add GitLab SSH private deploy key
- eval $(ssh-agent -s)
- tmpfile=$(mktemp ~/pk.XXXXXX)
- echo "$SSH_PRIVATE_KEY" > $tmpfile
- ssh-add $tmpfile
- rm $tmpfile
# Tag the release on GitLab
- rm -rf ../tagger
- mkdir -p ../tagger
- cd ../tagger
- git clone git@gitlab.com:$CI_PROJECT_PATH.git
- cd $CI_PROJECT_NAME
- git config --global user.email "[email protected]"
- git config --global user.name "Plasticity Developer Bot"
- git tag -a $(python setup.py -V) -m "Release $(python setup.py -V)"
- git push origin --tags
- cd $CI_PROJECT_DIR
- rm -rf ../tagger
- sleep 60 # Wait for GitLab to mirror to GitHub
# Create a release on GitHub
- curl -u plasticity-admin:$GITHUB_TOKEN -d "{\"tag_name\":\"$(python setup.py -V)\", \"name\":\"Release $(python setup.py -V)\"}" -H "Content-Type: application/json" -X POST https://api.github.com/repos/plasticityai/$CI_PROJECT_NAME/releases
# Upload to PyPI
- envsubst < deployment/.pypirc > ~/.pypirc
- chmod 600 ~/.pypirc
- python setup.py sdist upload -r pypitest
- python setup.py sdist upload -r pypi
# Trigger Travis CI
- curl -s -X POST -H "Content-Type:application/json" -H "Accept:application/json" -H "Travis-API-Version:3" -H "Authorization:token $TRAVIS_CI_TOKEN" -d '{"request":{"branch":"master"}}' https://api.travis-ci.org/repo/plasticityai%2Fsupersqlite/requests
# Trigger Appveyor CI
- curl -s -X POST -H "Content-Type:application/json" -H "Accept:application/json" -H "Authorization:Bearer $APPVEYOR_CI_TOKEN" -d '{"accountName":"plasticity-admin","projectSlug":"supersqlite","branch":"master"}' https://ci.appveyor.com/api/builds
only:
- master
Deploy Linux Wheels:
stage: Deploy Linux Wheels
script:
- export SHARED_PATH=/builds/shared/$CI_PROJECT_PATH
- mkdir -p ${SHARED_PATH}
- touch ${SHARED_PATH}/setup.py
- echo $SHARED_PATH
- cp -a $CI_PROJECT_DIR/. /$SHARED_PATH/
- mkdir -p /.awsstore
- touch /.awsstore/credentials
- echo "[default]" >> /.awsstore/credentials
- echo "aws_access_key_id = $AWS_ACCESS_KEY_ID" >> /.awsstore/credentials
- echo "aws_secret_access_key = $AWS_SECRET_ACCESS_KEY" >> /.awsstore/credentials
- eval "copytocibuildwheel() { export CIBUILDWHEEL_CONTAINER=\$(docker ps --format 'table {{.ID}}\t{{.Names}}\t{{.Image}}' | grep 'quay.io/pypa/manylinux1' | head -n1 | awk '{print \$2}') && (docker cp $CI_PROJECT_DIR/. \$CIBUILDWHEEL_CONTAINER:/project || true) && docker exec \$CIBUILDWHEEL_CONTAINER mkdir -p /root/.aws && docker cp /.awsstore/credentials \$CIBUILDWHEEL_CONTAINER:/root/.aws/credentials; }"
- eval "repeatcopytocibuildwheel() { while sleep 1; do copytocibuildwheel || (echo 'Could not locate container:' && docker ps); done }"
- export -f copytocibuildwheel
- export -f repeatcopytocibuildwheel
- export CIBW_PLATFORM=linux
- export CIBW_BUILD_VERBOSITY=3
- export CIBW_BEFORE_BUILD="echo 'Installing AWS...'; pip install awscli --upgrade; echo 'Sleeping...waiting for copy to complete...'; sleep 90; echo 'Project Directory After Copy:'; ls /project; echo 'AWS After Copy:'; ls /root/.aws/;"
- export CIBW_TEST_COMMAND="echo 'Uploading to AWS:'; ls /output; aws s3 sync /output/ s3://supersqlite.plasticity.ai/wheelhouse/"
- pip2 install pip setuptools -U
- pip2 install cibuildwheel==0.10.0
- /bin/bash -c "set -m; cibuildwheel --output-dir wheelhouse $SHARED_PATH & sleep 30 && echo 'Copying to sub-Docker container...' && repeatcopytocibuildwheel& sleep 28 && echo 'Done copying.' && fg 1"
only:
- master
language: python
env:
global:
CIBW_BUILD_VERBOSITY: 3
PIP: pip3
language: generic
os:
- linux
- osx
matrix:
exclude:
- os: linux
script:
- /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
- $PIP install pip setuptools -U
- $PIP install cibuildwheel==0.10.0
- eval "buildwheel() { cibuildwheel --output-dir wheelhouse >>buildwheel.log 2>&1; }"
- travis_wait 60 buildwheel || (tail -c 3670016 buildwheel.log && exit 1)
- tail -c 3670016 buildwheel.log
- ls wheelhouse
- $PIP install awscli --upgrade
- brew install awscli || true
- aws s3 sync ./wheelhouse/ s3://supersqlite.plasticity.ai/wheelhouse/
MIT License
Copyright (C) 2018, Plasticity Inc. [email protected]
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
\ No newline at end of file
include version.py
include glibc.py
include pep425tags.py
include LICENSE.txt
include requirements.txt
include supersqlite/third_party/*
include supersqlite/third_party/**/*
recursive-include supersqlite/third_party *
recursive-include supersqlite/third_party *.txt *.py *.c *.yml *.cfg *.h *.ini Makefile *.whl *.so *.o *.dll *.pyd .gitkeep
include supersqlite/req_wheels/*
include supersqlite/req_wheels/**/*
recursive-include supersqlite/req_wheels *
recursive-include supersqlite/req_wheels *.txt *.py *.c *.yml *.cfg *.h *.ini Makefile *.whl *.so *.o *.dll *.pyd .gitkeep
\ No newline at end of file
<div align="center"><img src="https://gitlab.com/Plasticity/magnitude/raw/master/images/SuperSQLite.png" alt="magnitude" height="70"></div>
## <div align="center">SuperSQLite: a supercharged SQLite library for Python<br /><br />[![pipeline status](https://gitlab.com/Plasticity/supersqlite/badges/master/pipeline.svg)](https://gitlab.com/Plasticity/supersqlite/commits/master)&nbsp;&nbsp;&nbsp;[![Build Status](https://travis-ci.org/plasticityai/supersqlite.svg?branch=master)](https://travis-ci.org/plasticityai/supersqlite)&nbsp;&nbsp;&nbsp;[![Build status](https://ci.appveyor.com/api/projects/status/72lwh2g7a9ddbnt2/branch/master?svg=true)](https://ci.appveyor.com/project/plasticity-admin/supersqlite/branch/master)<br/>[![PyPI version](https://badge.fury.io/py/supersqlite.svg)](https://pypi.python.org/pypi/supersqlite/)&nbsp;&nbsp;&nbsp;[![license](https://img.shields.io/github/license/mashape/apistatus.svg?maxAge=2592000)](https://gitlab.com/Plasticity/supersqlite/blob/master/LICENSE.txt)&nbsp;&nbsp;&nbsp;[![Python version](https://img.shields.io/pypi/pyversions/supersqlite.svg)](https://pypi.python.org/pypi/supersqlite/)</div>
A feature-packed Python package for utilizing SQLite in Python by [Plasticity](https://www.plasticity.ai/). It is intended to be a drop-in replacement to Python's built-in [SQLite API](https://docs.python.org/3/library/sqlite3.html), but without any limitations. It offers unique features like [remote streaming over HTTP](#remote-streaming-over-http) and [bundling of extensions like JSON, R-Trees (geospatial indexing), and Full Text Search](#extensions). SuperSQLite is also packaged with pre-compiled native binaries for SQLite and all of its extensions for nearly every platform as to avoid any C/C++ compiler errors during install.
## Table of Contents
- [Installation](#installation)
- [Motivation](#motivation)
- [Using the Library](#using-the-library)
* [Connecting](#connecting)
* [Querying](#querying)
- [Remote Streaming over HTTP](#remote-streaming-over-http)
- [Other Documentation](#other-documentation)
- [Other Programming Languages](#other-programming-languages)
- [Contributing](#contributing)
- [Roadmap](#roadmap)
- [Other Notable Projects](#other-notable-projects)
- [LICENSE and Attribution](#license-and-attribution)
## Installation
You can install this package with `pip`:
```python
pip install supersqlite # Python 2.7
pip3 install supersqlite # Python 3
```
## Motivation
[SQLite](http://www.sqlite.org), is a fast, popular embedded database, used by [large enterprises](https://www.sqlite.org/famous.html). It is the [most widely-deployed database](https://www.sqlite.org/mostdeployed.html) and has billions of deployments. It has a [built-in](https://docs.python.org/3/library/sqlite3.html) binding in Python.
The Python bindings, however, often are compiled against an out-of-date copy of SQLite or may be compiled with [limitations](https://www.sqlite.org/limits.html) set to low levels. Moreover, it is difficult to load extremely useful extensions like [JSON1](https://www.sqlite.org/json1.html) that adds JSON functionality to SQLite or [FTS5](https://www.sqlite.org/fts5.html) that adds full-text search functionality to SQLite since they must be compiled with a C/C++ compiler on each platform before being loaded.
SuperSQLite aims to solve these problems by packaging a newer version of SQLite natively pre-compiled for every platform along with natively pre-compiled SQLite extensions. SuperSQLite also adds useful unique new features like [remote streaming over HTTP](#remote-streaming-over-http) to read from a centralized SQLite database.
## When to use SuperSQLite?
SQLite is [extremely reliable and durable](https://www.sqlite.org/hirely.html) for large amounts of data ([up to 140TB](https://www.sqlite.org/limits.html)). It is considered one of the most [well-engineered and well-tested](https://www.sqlite.org/testing.html) software solutions today, with 711x more test code than implementation code.
SQLite is [faster than nearly every other database](https://www.sqlite.org/speed.html) at read-heavy use cases (especially compared to databases that may use a client-server model with network latency like MySQL, PostgreSQL, MongoDB, DynamoDB, etc.). You can also instantiate SQLite completely in-memory to remove disk latency, if your data will fit within RAM. For key/value use cases, you can get comparable or better [read/write performance to key/value databases like LevelDB](https://sqlite.org/src4/doc/trunk/www/lsmperf.wiki) with the [LSM1 extension](#extensions).
When you have a write-heavy workload with *multiple* servers that need to write concurrently to a shared database (backend to a website), you would probably want to choose something that has a client-server model instead like PostgreSQL, although SQLite can handle processing write requests fast enough that it is sufficient for most concurrent write loads. In fact, Expensify uses [SQLite for their entire backend](https://blog.expensify.com/2018/01/08/scaling-sqlite-to-4m-qps-on-a-single-server/). If you need the database to be automatically replicated or automatically sharded across machines or other distributed features, you probably want to use something else.
See [Appropriate Uses For SQLite](https://www.sqlite.org/whentouse.html) for more information and [Well-Known Users of SQLite](https://www.sqlite.org/famous.html) for example use cases.
## Using the Library
### Connecting
### Querying
### Remote Streaming over HTTP
### Extensions
#### JSON1
#### FTS3, FTS4, FTS5
#### LSM1
#### R\*Tree
### Export SQLite Resources
## Other Documentation
SuperSQLite is a subclass of the [apsw](https://github.com/rogerbinns/apsw) Python SQLite wrapper and extends its functionality. You can find the full documentation for that library [here](https://rogerbinns.github.io/apsw/), which in turn attempts to implement [PEP 249 (DB API)](https://www.python.org/dev/peps/pep-0249/).
Other documentation is not available at this time. See the source file directly (it is well commented) if you need more information about a method's arguments or want to see all supported features.
## Other Programming Languages
Currently, this library only supports Python. There are no plans to port it to any other languages, but since SQLite has a native C implementation and has bindings in most languages, you can use the [export functions](#export-sqlite-resources) to load SuperSQLite's SQLite extensions in the SQLite bindings of other programming languages or link SuperSQLite's version of SQLite to a native binary.
## Contributing
The main repository for this project can be found on [GitLab](https://gitlab.com/Plasticity/supersqlite). The [GitHub repository](https://github.com/plasticityai/supersqlite) is only a mirror. Pull requests for more tests, better error-checking, bug fixes, performance improvements, or documentation or adding additional utilties / functionalities are welcome on [GitLab](https://gitlab.com/Plasticity/supersqlite).
You can contact us at [[email protected]](mailto:[email protected]).
## Roadmap
* Out of the box, "fast-write" configuration option that makes the connection optimized for fast-writing.
* Out of the box, "fast-read" configuration option that makes the connection optimized for
fast-reading.
* Optimize streaming cache behavior
## Other Notable Projects
* [pysqlite](https://github.com/ghaering/pysqlite) - The built-in `sqlite3` module in Python.
* [apsw](https://github.com/rogerbinns/apsw) - Powers the main API of SuperSQLite, aims to port all of SQLite's API functionality (like VFSes) to Python, not just the query APIs.
* [Magnitude](https://github.com/plasticityai/magnitude/) - Another project by Plasticity that uses SuperSQLite's unique features for machine learning embedding models.
## LICENSE and Attribution
This repository is licensed under the license found [here](LICENSE.txt).
The SQLite "feather" icon is taken from the [SQLite project](https://www.sqlite.org) which is released as [public domain](https://www.sqlite.org/copyright.html).
This project is *not* affiliated with the official SQLite project.
environment:
global:
CIBW_SKIP: cp27-* cp33-* cp34-* *-win32
CIBW_BUILD_VERBOSITY: 3
build_script:
- c:\python27\python.exe -m pip install pip setuptools -U
- pip install cibuildwheel==0.10.0
- cibuildwheel --output-dir wheelhouse
- dir wheelhouse
- pip install awscli --upgrade --user
- aws s3 sync ./wheelhouse/ s3://supersqlite.plasticity.ai/wheelhouse/
artifacts:
- path: "wheelhouse\\*.whl"
name: Wheels
branches:
only:
- appveyor-never-build-branch-automatically-dont-use-this-name
\ No newline at end of file
from __future__ import absolute_import
import ctypes
import re
import warnings
def glibc_version_string():
    "Returns glibc version string, or None if not using glibc."
    # ctypes.CDLL(None) is dlopen(NULL): per the dlopen manpage, "If
    # filename is NULL, then the returned handle is for the main program".
    # That lets the dynamic linker tell us which libc this process is
    # actually linked against.
    main_program = ctypes.CDLL(None)
    if not hasattr(main_program, "gnu_get_libc_version"):
        # gnu_get_libc_version is exported only by glibc, so a missing
        # symbol means we are linked against some other libc.
        return None
    version_fn = main_program.gnu_get_libc_version
    # gnu_get_libc_version() returns a C string such as "2.5".
    version_fn.restype = ctypes.c_char_p
    version = version_fn()
    # Under Python 3 a c_char_p result arrives as bytes; normalize to str.
    if not isinstance(version, str):
        version = version.decode("ascii")
    return version
# Separated out from have_compatible_glibc for easier unit testing
def check_glibc_version(version_str, required_major, minimum_minor):
    """Return True iff version_str names glibc required_major.x, x >= minimum_minor.

    Emits a RuntimeWarning and returns False when version_str cannot be
    parsed as "major.minor".
    """
    # A regexp rather than str.split: we want to discard whatever junk may
    # trail the minor version in patched/forked glibcs (e.g. Linaro ships
    # version strings like "2.20-2014.11"). See gh-3588.
    match = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if match is None:
        warnings.warn("Expected glibc version with 2 components major.minor,"
                      " got: %s" % version_str, RuntimeWarning)
        return False
    major = int(match.group("major"))
    minor = int(match.group("minor"))
    return major == required_major and minor >= minimum_minor
def have_compatible_glibc(required_major, minimum_minor):
    """Return True iff this process is linked against a compatible glibc.

    Compatible means: same major version and a minor version of at least
    minimum_minor. Returns False when not linked against glibc at all.
    """
    detected = glibc_version_string()
    return (detected is not None and
            check_glibc_version(detected, required_major, minimum_minor))
# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on my computer, platform says:
#
# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
# ('glibc', '2.7')
# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
# ('glibc', '2.9')
#
# But the truth is:
#
# ~$ ldd --version
# ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate, because it means that the linehaul data on libc
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver():
    """Try to determine the glibc version

    Returns a tuple of strings (lib, version) which default to empty strings
    in case the lookup fails.
    """
    version = glibc_version_string()
    # Mirror platform.libc_ver()'s return shape, but with a value that is
    # actually trustworthy (see the commentary above this function).
    return ("", "") if version is None else ("glibc", version)
This diff is collapsed.
[metadata]
description-file = README.md
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
debian-build
debian/copyright
debian/changelog
src/shell.c
*.orig
testdb2
apsw.pyd
sqlite3genfkey.c
TAGS
sqlite3async.h
sqlite3async.c
callgrind.out.*
tools/googlecode_upload.py
:memory:
doc/vfs.rst
doc/example.rst
doc/cursor.rst
doc/connection.rst
doc/blob.rst
doc/backup.rst
doc/apsw.rst
doc/vtable.rst
*.pyc
testextension.*
testdbx-journal
testdbx
*~
*.so
*.o
*.gcno
*.gcda
*.gcov
sqlite3/
build/
dist/
sqlite3.h
sqlite3.c
sqlite3ext.h
MANIFEST
MANIFEST.in.tmp
megatestresults/
work/
coverage/
testdb
/.coverage
apsw.egg-info
/.ropeproject/
Copyright (c) 2004-2017 Roger Binns <[email protected]>
See src/traceback.c for code by Greg Ewing.
All code and documentation is provided under this license:
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source
distribution.
Alternatively you may strike the license above and use it under any
OSI approved open source license such as those listed at
http://opensource.org/licenses/alphabetical
# The C source
include src/apsw.c
include src/apswbuffer.c
include src/apswversion.h
include src/backup.c
include src/blob.c
include src/connection.c
include src/cursor.c
include src/exceptions.c
include src/pyutil.c
include src/statementcache.c
include src/traceback.c
include src/testextension.c
include src/util.c
include src/vfs.c
include src/vtable.c
# See https://github.com/rogerbinns/apsw/issues/89
include MANIFEST.in
# other files
include checksums
include mingwsetup.bat
include setup.py
include tools/speedtest.py
include tools/apswtrace.py
# shell is not needed at runtime - we compile it into the C source
include tools/shell.py
include tests.py
SQLITEVERSION=3.25.2
APSWSUFFIX=-r1
RELEASEDATE="7 October 2018"
VERSION=$(SQLITEVERSION)$(APSWSUFFIX)
VERDIR=apsw-$(VERSION)
PYTHON=python
# Some useful info
#
# To use a different SQLite version: make SQLITEVERSION=1.2.3 blah blah
#
# build_ext - builds extension in current directory fetching sqlite
# test - builds extension in place then runs test suite
# doc - makes the doc
# source - makes a source zip in dist directory after running code through test suite
GENDOCS = \
doc/blob.rst \
doc/vfs.rst \
doc/vtable.rst \
doc/connection.rst \
doc/cursor.rst \
doc/apsw.rst \
doc/backup.rst
.PHONY : all docs doc header linkcheck publish showsymbols compile-win source source_nocheck release tags clean ppa dpkg dpkg-bin coverage valgrind valgrind1 tagpush
# Default goal: regenerate the version header, then build the documentation.
all: header docs
# Force-tag this commit with the release version and push all tags upstream.
tagpush:
	git tag -af $(SQLITEVERSION)$(APSWSUFFIX)
	git push --tags
# Remove build products, generated docs and stray temp/object files,
# then recreate an empty dist directory.
clean:
	make PYTHONPATH="`pwd`" VERSION=$(VERSION) -C doc clean
	rm -rf dist build work/* megatestresults apsw.egg-info
	mkdir dist
	for i in '*.pyc' '*.pyo' '*~' '*.o' '*.so' '*.dll' '*.pyd' '*.gcov' '*.gcda' '*.gcno' '*.orig' '*.tmp' 'testdb*' 'testextension.sqlext' ; do \
	find . -type f -name "$$i" -print0 | xargs -0t --no-run-if-empty rm -f ; done
# Alias: "make doc" == "make docs".
doc: docs
# Build the extension in place, regenerate all doc sources, then run the
# Sphinx build; the leading "-" lets a spellcheck failure pass non-fatally.
docs: build_ext $(GENDOCS) doc/example.rst doc/.static
	env PYTHONPATH=. $(PYTHON) tools/docmissing.py
	env PYTHONPATH=. $(PYTHON) tools/docupdate.py $(VERSION)
	make PYTHONPATH="`pwd`" VERSION=$(VERSION) RELEASEDATE=$(RELEASEDATE) -C doc clean html
	-tools/spellcheck.sh
# Regenerate example.rst by executing the example code; "rm -f dbfile"
# clears the scratch database the example creates.
doc/example.rst: example-code.py tools/example2rst.py src/apswversion.h
	rm -f dbfile
	env PYTHONPATH=. $(PYTHON) tools/example2rst.py
# Sphinx expects the static directory to exist, so ensure it does.
doc/.static:
	mkdir -p doc/.static
# This is probably gnu make specific but only developers use this makefile
$(GENDOCS): doc/%.rst: src/%.c tools/code2rst.py
	env PYTHONPATH=. $(PYTHON) tools/code2rst.py $(SQLITEVERSION) $< $@
# Build the C extension in-place, first fetching the matching SQLite
# amalgamation and enabling every extension.
build_ext:
	env APSW_FORCE_DISTUTILS=t $(PYTHON) setup.py fetch --version=$(SQLITEVERSION) --all build_ext --inplace --force --enable-all-extensions
# Fetch sources then run the test suite under the coverage harness.
coverage:
	env APSW_FORCE_DISTUTILS=t $(PYTHON) setup.py fetch --version=$(SQLITEVERSION) --all && env APSW_PY_COVERAGE=t tools/coverage.sh
# Build the extension in place, then run the full test suite.
test: build_ext
	env APSW_FORCE_DISTUTILS=t $(PYTHON) tests.py
# Hand-compiled debug build (SQLITE_DEBUG, asserts kept via APSW_NO_NDEBUG,
# test fixtures enabled) followed by running the tests against it.
debugtest:
	gcc -pthread -fno-strict-aliasing -g -fPIC -Wall -DAPSW_USE_SQLITE_CONFIG=\"sqlite3/sqlite3config.h\" -DEXPERIMENTAL -DSQLITE_DEBUG -DAPSW_USE_SQLITE_AMALGAMATION=\"sqlite3.c\" -DAPSW_NO_NDEBUG -DAPSW_TESTFIXTURES -I`$(PYTHON) -c "import distutils.sysconfig,sys; sys.stdout.write(distutils.sysconfig.get_python_inc())"` -I. -Isqlite3 -Isrc -c src/apsw.c
	gcc -pthread -g -shared apsw.o -o apsw.so
	$(PYTHON) tests.py $(APSWTESTS)
# Needs a debug python. Look at the final numbers at the bottom of
# l6, l7 and l8 and see if any are growing
valgrind: /space/pydebug/bin/python
	$(PYTHON) setup.py fetch --version=$(SQLITEVERSION) --all && \
	env APSWTESTPREFIX=/tmp/ PATH=/space/pydebug/bin:$$PATH SHOWINUSE=t APSW_TEST_ITERATIONS=6 tools/valgrind.sh 2>&1 | tee l6 && \
	env APSWTESTPREFIX=/tmp/ PATH=/space/pydebug/bin:$$PATH SHOWINUSE=t APSW_TEST_ITERATIONS=7 tools/valgrind.sh 2>&1 | tee l7 && \
	env APSWTESTPREFIX=/tmp/ PATH=/space/pydebug/bin:$$PATH SHOWINUSE=t APSW_TEST_ITERATIONS=8 tools/valgrind.sh 2>&1 | tee l8
# Same as above but does just one run
valgrind1: /space/pydebug/bin/python
	$(PYTHON) setup.py fetch --version=$(SQLITEVERSION) --all && \
	env APSWTESTPREFIX=/tmp/ PATH=/space/pydebug/bin:$$PATH SHOWINUSE=t APSW_TEST_ITERATIONS=1 tools/valgrind.sh
# Verify that every external link in the documentation resolves.
linkcheck:
	make RELEASEDATE=$(RELEASEDATE) VERSION=$(VERSION) -C doc linkcheck
# Sync the built HTML docs into the sibling publishing checkout (no-op
# when ../apsw-publish does not exist).
publish: docs
	if [ -d ../apsw-publish ] ; then rm -f ../apsw-publish/* ../apsw-publish/_static/* ../apsw-publish/_sources/* ; \
	rsync -a doc/build/html/ ../apsw-publish/ ; cd ../apsw-publish ; git status ; \
	fi
# Write the version string into the C header consumed by the build.
header:
	echo "#define APSW_VERSION \"$(VERSION)\"" > src/apswversion.h
# the funky test stuff is to exit successfully when grep has rc==1 since that means no lines found.
showsymbols:
	rm -f apsw.so
	$(PYTHON) setup.py fetch --all --version=$(SQLITEVERSION) build_ext --inplace --force --enable-all-extensions
	test -f apsw.so
	set +e; nm --extern-only --defined-only apsw.so | egrep -v ' (__bss_start|_edata|_end|_fini|_init|initapsw)$$' ; test $$? -eq 1 || false
# Getting Visual Studio 2008 Express to work for 64 compilations is a
# pain, so use this builtin hidden command
WIN64HACK=win64hackvars
WINBPREFIX=fetch --version=$(SQLITEVERSION) --all build --enable-all-extensions
WINBSUFFIX=install build_test_extension test
WINBINST=bdist_wininst
WINBMSI=bdist_msi
# You need to use the MinGW version of make. See
# http://bugs.python.org/issue3308 if 2.6+ or 3.0+ fail to run with
# missing symbols/dll issues. For Python 3.1 they went out of their