diff --git a/contrib/retep/CHANGELOG b/contrib/retep/CHANGELOG
deleted file mode 100644
index 188c40129e2..00000000000
--- a/contrib/retep/CHANGELOG
+++ /dev/null
@@ -1,7 +0,0 @@
-Fri Mar 02 16:08:00 GMT 2001 peter@retep.org.uk
- - Started importing in the rest of the retep tools.
-
-Tue Jan 23 10:19:00 GMT 2001 peter@retep.org.uk
- - Finished the XML Export classes
- - First of the test data suite now in CVS.
-
diff --git a/contrib/retep/Implementation b/contrib/retep/Implementation
deleted file mode 100644
index b3125acf0ed..00000000000
--- a/contrib/retep/Implementation
+++ /dev/null
@@ -1,116 +0,0 @@
-Retep Tools Implementation
---------------------------
-
-
-The tools are designed to be packaged into a single jar file, but each one can
-be run either individually or as part of one single application.
-
-To run the big application, you can either:
-
- java -jar retepTools.jar
-
-or with the retepTools.jar in the classpath run:
-
- java uk.org.retep.tools.Main
-
-Windows users: you can also double-click the retepTools.jar, as Windows will
-automatically run java for you.
-
-To run the individual tools, you must have the .jar file in your classpath and
-then run the relevant Main class.
-
-Tool Type Class
-------------------------------------------------------------------------------
-pg_hba.conf Editor/repairer Editor uk.org.retep.util.hba.Main
-Properties Editor Editor uk.org.retep.util.proped.Main
-
-
-Layout of the classes
----------------------
-
-Simply put, tools that work on property files (Java properties, resource files,
-configuration settings - pg_hba.conf for example) go under uk.org.retep.util in
-their own package. Other utility classes (like PropertiesIO) go into the
-uk.org.retep.util.misc package, except for certain related groups of classes.
-
-For example, TableModels: in Swing you have JTable, which uses a TableModel to
-display (and possibly update) some data. These go under uk.org.retep.util.models,
-where you will find PropertiesTableModel for example. This one allows a
-Properties object to be displayed & updated.
-
-Some core classes, like Logger and ExceptionDialog, go into the main
-uk.org.retep.util package.
-
-Directory/Package Contents
-------------------------------------------------------------------------------
-uk.org.retep Home of the tools.properties file
-uk.org.retep.tools The main all-in-one application
-uk.org.retep.dtu The Data Transform Unit
-uk.org.retep.util Core utility classes
-uk.org.retep.util.hba pg_hba.conf editor/repairer
-uk.org.retep.util.misc Misc utility classes
-uk.org.retep.util.models Swing table models
-uk.org.retep.util.proped Property Editor
-uk.org.retep.util.xml.core Basic XML Factory
-uk.org.retep.util.xml.jdbc JDBC/XML interface
-uk.org.retep.util.xml.parser Simple SAX parser
-
-Structure of a tool
--------------------
-
-Each tool has at least 2 base classes, and an entry in the tools.properties
-file. For this example, I'll show you the Properties Editor:
-
-Base package uk.org.retep.util.proped
-Main tool class uk.org.retep.util.proped.PropertyEditor
-Standalone class uk.org.retep.util.proped.Main
-
-The main tool class is the entry point used by the main application. Because
-the tools are used in a GUI, this class must extend javax.swing.JComponent and
-implement the uk.org.retep.tools.Tool interface. (NB: You will find I always
-use JPanel, but JComponent is specified here so that any Swing class can be
-used; you are not limited to JPanel.)
-
-The standalone class is a basic static class that implements the main method.
-It should extend the uk.org.retep.util.StandaloneApp class and be written along
-the lines of the following example:
-
- import uk.org.retep.util.StandaloneApp;
- import javax.swing.JComponent;
-
- public class Main extends StandaloneApp
- {
- public Main(String[] args)
- throws Exception
- {
- super(args);
- }
-
- public JComponent init()
- throws Exception
- {
- // Your initialisation here. In this case the PropertyEditor
- PropertyEditor panel = new PropertyEditor();
-
- // do stuff here, ie load a file if supplied
-
- // return the tool
- return panel;
- }
-
- public static void main(String[] args)
- throws Exception
- {
- Main main = new Main(args);
- main.pack();
- main.setVisible(true);
- }
- }
-
-You will find a template in the uk.org.retep.util.Main class. Simply copy that
-class's source, as it gives you the basic stub, then add your own implementation
-of init() like the one above. Look at the full Main class for the Properties
-Editor to see how to get at the command-line arguments.
-
-By convention, the standalone class is named Main.
-
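For example, assuming retepTools.jar is in the current directory, the two launch
styles described above look like this; the properties file name is only an
illustration:

    java -jar retepTools.jar

    java -cp retepTools.jar uk.org.retep.util.proped.Main myfile.properties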
diff --git a/contrib/retep/Makefile b/contrib/retep/Makefile
deleted file mode 100644
index e5de9c7ac80..00000000000
--- a/contrib/retep/Makefile
+++ /dev/null
@@ -1,30 +0,0 @@
-#-------------------------------------------------------------------------
-#
-# Makefile for contributed retep tools
-#
-# Copyright (c) 2001, PostgreSQL Global Development Group
-#
-# $Header: /cvsroot/pgsql/contrib/retep/Attic/Makefile,v 1.1 2001/07/06 23:07:20 petere Exp $
-#
-#-------------------------------------------------------------------------
-
-subdir = contrib/retep
-top_builddir = ../..
-include $(top_builddir)/src/Makefile.global
-
-all:
- $(ANT) -buildfile $(srcdir)/build.xml all
-
-install: installdirs
- $(ANT) -buildfile $(srcdir)/build.xml install \
- -Dinstall.directory=$(javadir)
-
-installdirs:
- $(mkinstalldirs) $(javadir)
-
-uninstall:
- $(ANT) -buildfile $(srcdir)/build.xml uninstall \
- -Dinstall.directory=$(javadir)
-
-clean distclean maintainer-clean:
- $(ANT) -buildfile $(srcdir)/build.xml clean
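For reference, a typical build-and-install of this directory, assuming GNU make
and an already-configured source tree, would be:

    cd contrib/retep
    gmake all
    gmake install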
diff --git a/contrib/retep/README b/contrib/retep/README
deleted file mode 100644
index 5355c9d99f3..00000000000
--- a/contrib/retep/README
+++ /dev/null
@@ -1,35 +0,0 @@
-Before you ask what retepTools are, they are my personal suite of utilities.
-About 90% of them are JDBC related (either they use JDBC, or I use them in
-developing the JDBC driver).
-
-Now, for various reasons I won't go into here, in January 2001 I decided to
-release the entire lot to the public. I could have used something like
-SourceForge, but as they are mainly JDBC related I thought here was the best
-place.
-
-Now all of them (bar retepPDF, see the end-note) will be going into the
-/contrib/retep directory over the next few months. They range from simple XML
-Import/Export classes to entire sub-systems that can be plugged into applications.
-
-None of this lot was ever released before, so I'm placing it all under
-PostgreSQL's licence.
-
-Please refer to the Implementation file for details of which package does what.
-
-It all requires Java2SE (JDK1.2) as a minimum. I do have some plans for some
-EJB tools later, so those will need Java2EE, but not yet ;-)
-
-Peter Mount
-peter@retep.org.uk
-March 2 2001
-
-retepPDF: This is not included for two reasons:
-
-1: It's big and not really related in any way to PostgreSQL
-2: More importantly, I (maybe foolishly) released it some 3 years ago under
-   the LGPL. As a few people have added to it, it's not really possible to
-   change the licence, and I don't want to pollute PostgreSQL's source tree ;-)
-
-retepGraph: This was an old graphics library. It's been obsolete for 3 years
-now, so it's not going in.
-
diff --git a/contrib/retep/build.xml b/contrib/retep/build.xml
deleted file mode 100644
index 04a8db5c2f0..00000000000
--- a/contrib/retep/build.xml
+++ /dev/null
@@ -1,98 +0,0 @@
-[Ant build markup of build.xml not preserved in this rendering]
-*** WARNING: Contributed retep tools need jdk1.2 or later.
-*** Compilation NOT done
diff --git a/contrib/retep/data/cds.dtd b/contrib/retep/data/cds.dtd
deleted file mode 100644
index df542c34768..00000000000
--- a/contrib/retep/data/cds.dtd
+++ /dev/null
@@ -1,16 +0,0 @@
-[DTD markup of cds.dtd not preserved in this rendering]
diff --git a/contrib/retep/data/cds.xml b/contrib/retep/data/cds.xml
deleted file mode 100644
index fb0203bda90..00000000000
--- a/contrib/retep/data/cds.xml
+++ /dev/null
@@ -1,2691 +0,0 @@
-[XML markup of cds.xml (2,691 lines of test data) not preserved in this rendering]
diff --git a/contrib/retep/retep.jpx b/contrib/retep/retep.jpx
deleted file mode 100644
index dcf68ac8ceb..00000000000
--- a/contrib/retep/retep.jpx
+++ /dev/null
@@ -1,55 +0,0 @@
-[Project file markup of retep.jpx not preserved in this rendering]
diff --git a/contrib/retep/uk/org/retep/dtu/DCollection.java b/contrib/retep/uk/org/retep/dtu/DCollection.java
deleted file mode 100644
index e97fc067c43..00000000000
--- a/contrib/retep/uk/org/retep/dtu/DCollection.java
+++ /dev/null
@@ -1,228 +0,0 @@
-package uk.org.retep.dtu;
-
-import uk.org.retep.xml.core.XMLFactory;
-import uk.org.retep.xml.core.XMLFactoryException;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Iterator;
-
-public class DCollection implements Collection
-{
- protected int num,max,inc;
-
- protected DElement elements[];
-
- public DCollection()
- {
- this(10);
- }
-
- public DCollection(int aIncrement)
- {
- num=0;
- max=0;
- inc=aIncrement;
- elements=null;
- }
-
- protected void resize()
- {
- if(num>=max) {
- max+=inc;
- DElement n[] = new DElement[max];
- if(elements!=null) {
- System.arraycopy(elements,0,n,0,elements.length);
- }
- elements=n;
- }
- }
-
- public int size()
- {
- return num;
- }
-
- public boolean isEmpty()
- {
- return (num==0);
- }
-
- /**
- * Checks the list using its XML id.
- */
- public synchronized boolean contains(Object parm1)
- {
- if(parm1 instanceof DElement) {
- DElement e = (DElement) parm1;
- int ei = e.getID();
-
- // out of range?
- if(ei<0 || ei>=num)
- return false;
-
- return elements[ei].equals(e);
- }
-
- return false;
- }
-
- public Iterator iterator()
- {
- return new iterator(this);
- }
-
- /**
- * Inner class to implement an Iterator
- */
- protected class iterator implements Iterator
- {
- protected DCollection c;
- protected int i;
-
- public iterator(DCollection aCollection)
- {
- c=aCollection;
- i=0;
- }
-
- public boolean hasNext()
- {
- return i-1) {
- return false;
- }
-
- // Add to the Collection
- resize();
- e.setID(num);
- elements[num++] = e;
- return true;
- }
- return false;
- }
-
- public synchronized boolean remove(Object parm1)
- {
- if(parm1 instanceof DElement) {
- DElement e = (DElement) parm1;
- int ei = e.getID();
- if(ei<0 || ei>=num)
- return false;
-
- // Mark the node as parentless
- e.setID(-1);
-
- // Now remove from the array by moving latter nodes, fixing their ids
- // in the process
- for(int j=ei,k=ei+1;k=num)
- return null;
-
- return elements[id];
- }
-
- /**
- * Repairs the collection, ensuring all id's are correct
- */
- public synchronized void repair()
- {
- for(int i=0;iNB: args is volatile, so if you use it beyond the lifetime of
- * this call, then you must make a copy of the HashMap (and not use simply
- * store this HashMap).
- * @param level The number of tags above this
- * @param tag The tag name
- * @param args A HashMap of any arguments
- */
- public void tagStart(int level,String tag,HashMap args)
- {
- Logger.log(Logger.DEBUG,"DModuleXML.tagStart",tag);
-
- // Prefetch some common attributes
- String sType = (String) args.get(DConstants.XML_TYPE);
- String sX = (String) args.get(DConstants.XML_X);
- String sY = (String) args.get(DConstants.XML_Y);
-
- int type=-1,x=-1,y=-1;
-
- if(sType!=null) {
- type = Integer.parseInt(sType);
- }
-
- if(sX!=null) {
- y = Integer.parseInt(sX);
- }
-
- if(sY!=null) {
- x = Integer.parseInt(sY);
- }
-
- // Match the tag against the tags array (used for switch() )
- int tagID=T_DEFAULT;
- for(int i=0;iNB: content is volatile, so you must copy its contents if you use
- * it beyond the lifetime of this call.
- * @param content CharArrayWriter containing the content of the tag.
- */
- public void tagContent(CharArrayWriter content)
- {
- // Ignore
- }
-
- public void fixTransforms()
- {
- DNode to;
- Iterator it = txmap.iterator();
-
- while(it.hasNext()) {
- tx x = (tx) it.next();
-
- //Logger.log(Logger.DEBUG,"Fixing transform "+x.toID,x.transform,Integer.toString(x.node.getID()),Integer.toString(module.getNode(x.toID).getID()));
- to = module.getNode(x.toID);
-
- x.transform.setFrom(x.node);
- x.transform.setTo(to);
- //to.setFrom(x.transform);
- }
-
- }
-
- /**
- * Parse an InputSource and return the contained module.
- * @return DModule loaded, null if the xml file does not contain a module.
- */
- public DModule parse(InputSource is)
- throws IOException,SAXException
- {
- getTagHandler().parse(is);
- fixTransforms();
- return module;
- }
-
- /**
- * Parse an uri and return the contained module.
- * @return DModule loaded, null if the xml file does not contain a module.
- */
- public DModule parse(String uri)
- throws IOException,SAXException
- {
- getTagHandler().parse(uri);
- fixTransforms();
- return module;
- }
-
- /**
- * Debug test - read xml from one file and save to another.
- */
- public static void main(String args[]) throws Exception
- {
- if(args.length!=2) {
- System.err.println("Syntax: java DModuleXML in-file out-file");
- System.exit(1);
- }
-
- Logger.setLevel(Logger.DEBUG);
-
- Logger.log(Logger.INFO,"DModuleXML Read test1.xml");
- DModuleXML dm = new DModuleXML();
- DModule module = dm.parse(new InputSource(new FileInputStream(args[0])));
-
- Logger.log(Logger.INFO,"Parse complete");
-
- Logger.log(Logger.INFO,"DModuleXML Write XML");
- FileWriter fw = new FileWriter(args[1]);
- module.saveXML(new XMLFactory(fw));
- fw.close();
- Logger.log(Logger.INFO,"Write complete");
-
- DProcessor.run(module);
- }
-}
\ No newline at end of file
diff --git a/contrib/retep/uk/org/retep/dtu/DNode.java b/contrib/retep/uk/org/retep/dtu/DNode.java
deleted file mode 100644
index 7a8321741cd..00000000000
--- a/contrib/retep/uk/org/retep/dtu/DNode.java
+++ /dev/null
@@ -1,233 +0,0 @@
-package uk.org.retep.dtu;
-
-import uk.org.retep.util.Logger;
-import uk.org.retep.xml.core.XMLFactory;
-import uk.org.retep.xml.core.XMLFactoryException;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.Iterator;
-
-/**
- * This is the base class for all nodes.
- */
-public class DNode implements DElement, Serializable
-{
- // The id of this node
- protected int id;
-
- // The type of this node
- protected int type;
-
- protected int x,y;
-
- public static final int OK = 0; // Node last ran fine
- public static final int ERROR = 1; // Node failed on last run
-
- /**
- * This type of node does nothing
- */
- public static int NOP = 0; // No action
-
- public DNode()
- {
- this(NOP);
- }
-
- public DNode(int aType)
- {
- id=-1;
- type=aType;
-
- // Init the transform linkage
- mf=mt=5;
- nf=nt=0;
- fn = new DTransform[mf];
- tn = new DTransform[mt];
-
- Logger.log(Logger.DEBUG,"new DNode");
- }
-
- public int getID()
- {
- return id;
- }
-
- public void setID(int aID)
- {
- id=aID;
- Logger.log(Logger.DEBUG,"DNode.setID",aID);
- }
-
- public int getType()
- {
- return type;
- }
-
- public void setType(int aType)
- {
- type=aType;
- Logger.log(Logger.DEBUG,"DNode.setType",aType);
- }
-
- /**
- */
- public void saveXML(XMLFactory aFactory)
- throws IOException, XMLFactoryException
- {
- Logger.log(Logger.DEBUG,"DNode.saveXML start",this);
- Iterator it;
-
- aFactory.startTag(DConstants.XML_NODE);
- aFactory.addAttribute(DConstants.XML_ID,new Integer(getID()));
- aFactory.addAttribute(DConstants.XML_TYPE,new Integer(getType()));
-
- // used for display only
- aFactory.addAttribute(DConstants.XML_X,new Integer(getX()));
- aFactory.addAttribute(DConstants.XML_Y,new Integer(getY()));
-
- // Save the transforms here (only the from list required)
- for(int i=0;i=mf) {
- mf+=5;
- DTransform nn[] = new DTransform[mf];
- System.arraycopy(fn,0,nn,0,nf);
- fn=nn;
- }
- fn[nf++]=aTransform;
- }
-
- /**
- * Adds a transform to this node (called by DTransform)
- */
- protected synchronized void setTo(DTransform aTransform)
- {
- for(int i=0;i=mt) {
- mt+=5;
- DTransform nn[] = new DTransform[mt];
- System.arraycopy(tn,0,nn,0,nt);
- tn=nn;
- }
- tn[nt++]=aTransform;
- }
-
- /**
- * Removes a transform (called by DTransform)
- */
- protected synchronized void removeFrom(DTransform aTransform)
- {
- for(int i=0;i0) {
- int numThreads = group.activeCount();
- Thread threads[] = new Thread[numThreads];
- cnt = group.enumerate(threads,false);
-
- //Logger.log(Logger.DEBUG,"Waiting on threads",cnt);
- while(cnt>0) {
- //Logger.log(Logger.DEBUG,"Waiting on thread",cnt);
- threads[--cnt].join(timeout);
- }
-
- Logger.log(Logger.DEBUG,"All threads appear to have died, retesting");
- }
- } catch(InterruptedException ie) {
- Logger.log(Logger.ERROR,"DProcessor, exception caught while waiting for threads to die",ie);
- }
-
- // finally close any open datasources
- Logger.log(Logger.DEBUG,"DProcessor cleanup");
-
- Logger.log(Logger.DEBUG,"DProcessor finished");
- }
-
- class proc implements Runnable
- {
- protected DModule module; // The module being run
- protected DNode pc; // current Program Counter
-
- protected DEnvironment env; // Shared environment
-
- // Used when launching new threads only
- protected DTransform trans; // If not null, a transform to run first
- protected int status;
-
- protected Thread thread;
-
- /**
- * Start processing from DNode aNode. This is called by DProcessor at
- * initialisation only.
- */
- protected proc(ThreadGroup aGroup,DModule aModule,DNode aNode,DEnvironment aEnv)
- {
- // aGroup will be null when forking...
- if(aGroup==null) {
- thread = new Thread(this);
- } else {
- thread = new Thread(aGroup,this);
- }
-
- module = aModule;
- pc = aNode;
- env = aEnv;
- }
-
- /**
- * Start processing the DTransform aTransform from aNode (does not execute
- * the node). This is called by this inner class itself when forking new
- * threads.
- */
- protected proc(DModule aModule,DNode aNode,DEnvironment aEnv,DTransform aTransform,int aStatus)
- {
- this(null,aModule,aNode,aEnv);
- trans = aTransform;
- status = aStatus;
- }
-
- /**
- * Start this thread
- */
- public void start()
- {
- thread.start();
- }
-
- public void run()
- {
- // Handle an initial transform first. It's used when a new Thread was created.
- if(trans!=null) {
- transform(trans,false,status);
- trans=null;
- }
-
- while(pc!=null) {
- //Logger.log(Logger.DEBUG,"running node ",pc.getID());
-
- // Process the node
- int status = pc.run(env);
- //Logger.log(Logger.DEBUG," status ",status);
-
- // Now the transforms. This thread continues with the first one that runs,
- // but any others that will also run will do so in their own thread.
- // If no transform runs (or there are none), then the thread will die.
- int numTrans = pc.getToTransforms();
- boolean fork=false;
- for(int i=0;i1) {
- // Split the option at the first '=' char if any
- int s = arg.startsWith("--") ? 2 : 1 ; // -- or -
- int e = arg.indexOf("=");
- String key,val;
- if(e>s) {
- // Format: -key=value
- key=arg.substring(s,e-1);
- val=arg.substring(e+1);
- } else if(e>-1 && e<=s) {
- // Can't have a property without a key!
- throw new Exception("Invalid option -=");
- } else {
- key=arg.substring(s);
- val=""; // can't be null
- }
-
- if(key.equals("d")) {
- // -d | --d is reserved to set the Logger level
- int level=0;
- if(!val.equals("")) {
- level=Integer.parseInt(val);
- }
- Logger.setLevel(level);
- } else {
- // Add all other properties into the Properties object
- props.put(key,val);
- Logger.log(Logger.INFO,"Argument",key,val);
- }
-
- } else {
- // Just a - on its own?
- System.out.println("Unknown option: -");
- }
- } else {
- // Add the argument to the array
- args.add(arg);
- }
- }
- }
-
-}
\ No newline at end of file
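Given the option parsing shown above, a standalone tool could be launched along
these lines (tool and file names are illustrative). The -d=2 option raises the
Logger level to DEBUG, and remaining arguments are collected for the tool itself,
so the properties file becomes argument 0:

    java -cp retepTools.jar uk.org.retep.util.proped.Main -d=2 myfile.properties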
diff --git a/contrib/retep/uk/org/retep/util/Logger.java b/contrib/retep/uk/org/retep/util/Logger.java
deleted file mode 100644
index c272f1d005a..00000000000
--- a/contrib/retep/uk/org/retep/util/Logger.java
+++ /dev/null
@@ -1,150 +0,0 @@
-package uk.org.retep.util;
-
-import java.io.CharArrayWriter;
-import java.io.PrintWriter;
-
-public class Logger
-{
- protected static int level;
- protected static PrintWriter logger;
-
- public static final int NONE = -1;
- public static final int INFO = 0;
- public static final int ERROR = 1;
- public static final int DEBUG = 2;
- public static final int ALL = 3;
-
- static {
- level = NONE;
- logger = null;
- };
-
- private static final String levels[] = {
- "INFO :",
- "ERROR:",
- "DEBUG:",
- "ALL :"
- };
-
- public static void setLevel(int aLevel)
- {
- // Incase we have not yet set a logger
- if(logger==null) {
- logger = new PrintWriter(System.out);
- }
-
- if(aLevel>ALL) {
- aLevel=ALL;
- }
-
- level=aLevel;
-
- if(level>NONE) {
- log(INFO,"Log level changed to",level,levels[level]);
- }
- }
-
- public static void setLogger(PrintWriter pw)
- {
- if(logger!=null) {
- try {
- logger.flush();
- logger.close();
- } catch(Exception ex) {
- logger=pw;
- log(ERROR,"Exception while closing logger",ex);
- }
- }
- logger=pw;
- }
-
- public static void log(String msg)
- {
- log(INFO,msg);
- }
-
- public static void log(int aLevel,String msg)
- {
- write(aLevel,msg,null);
- }
-
- public static void log(int aLevel,String msg,int arg1)
- {
- Object o[] = {new Integer(arg1)};
- write(aLevel,msg,o);
- }
-
- public static void log(int aLevel,String msg,int arg1,Object arg2)
- {
- Object o[] = {new Integer(arg1),arg2};
- write(aLevel,msg,o);
- }
-
- public static void log(int aLevel,String msg,double arg1)
- {
- Object o[] = {new Double(arg1)};
- write(aLevel,msg,o);
- }
-
- public static void log(int aLevel,String msg,double arg1,Object arg2)
- {
- Object o[] = {new Double(arg1),arg2};
- write(aLevel,msg,o);
- }
-
- public static void log(int aLevel,String msg,Object arg1)
- {
- Object o[] = {arg1};
- write(aLevel,msg,o);
- }
-
- public static void log(int aLevel,String msg,Object arg1,Object arg2)
- {
- Object o[] = {arg1,arg2};
- write(aLevel,msg,o);
- }
-
- public static void log(int aLevel,String msg,Object arg1,Object arg2,Object arg3)
- {
- Object o[] = {arg1,arg2,arg3};
- write(aLevel,msg,o);
- }
-
- public static void log(int aLevel,String msg,Throwable t)
- {
- CharArrayWriter buffer = new CharArrayWriter();
- PrintWriter printWriter = new PrintWriter(buffer);
- t.printStackTrace(printWriter);
- Object o[] = {buffer.toString()};
- buffer.close();
- write(aLevel,msg,o);
- }
-
- private static void write(int aLevel,String aMsg,Object args[])
- {
- // Can't be above ALL
- if(aLevel>ALL) {
- aLevel=ALL;
- }
-
- // Ignore if below or equal to NONE
- if(aLevel<=NONE || aLevel>level) {
- return;
- }
-
- logger.print("Logger:");
- logger.print(levels[aLevel]);
- logger.print(aMsg);
- if(args!=null) {
- for(int a=0;a0) {
- editor.openFile(globals.getArgument(0));
- }
-
- return editor;
- }
-
- public static void main(String[] args)
- throws Exception
- {
- Main main = new Main(args);
- main.pack();
- main.setVisible(true);
- }
-}
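A minimal usage sketch of the Logger API shown above; the class name and
messages are illustrative:

    import uk.org.retep.util.Logger;

    public class LoggerDemo
    {
        public static void main(String[] args)
        {
            Logger.setLevel(Logger.DEBUG);    // also installs a default PrintWriter on System.out
            Logger.log("starting up");        // the one-argument form logs at INFO
            Logger.log(Logger.DEBUG, "argument count", args.length);

            try {
                throw new Exception("demo");
            } catch(Exception ex) {
                Logger.log(Logger.ERROR, "caught exception", ex);  // appends the stack trace
            }
        }
    }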
diff --git a/contrib/retep/uk/org/retep/util/hba/Record.java b/contrib/retep/uk/org/retep/util/hba/Record.java
deleted file mode 100644
index b91e6dc49df..00000000000
--- a/contrib/retep/uk/org/retep/util/hba/Record.java
+++ /dev/null
@@ -1,238 +0,0 @@
-package uk.org.retep.util.hba;
-
-import uk.org.retep.util.Logger;
-import uk.org.retep.util.misc.IPAddress;
-import uk.org.retep.util.misc.WStringTokenizer;
-
-/**
- * Used to store the entries of a pg_hba.conf file
- * @author
- * @version 1.0
- */
-
-public class Record
-{
- int type;
- String dbname;
- IPAddress ip;
- IPAddress mask;
- int authType;
- String authArg;
-
- public static final int TYPE_LOCAL = 0;
- public static final int TYPE_HOST = 1;
- public static final int TYPE_HOSTSSL = 2;
-
- public static final String types[] = {
- "local","host","hostssl"
- };
-
- public static final int AUTH_TRUST = 0;
- public static final int AUTH_PASSWORD = 1;
- public static final int AUTH_CRYPT = 2;
- public static final int AUTH_IDENT = 3;
- public static final int AUTH_KRB4 = 4;
- public static final int AUTH_KRB5 = 5;
- public static final int AUTH_REJECT = 6;
-
- public static final String auths[] = {
- "trust","password","crypt",
- "ident",
- "krb4","krb5",
- "reject"
- };
-
- private static final String spc = " ";
-
- public Record()
- {
- }
-
- public int getType()
- {
- return type;
- }
-
- public void setType(int aType)
- {
- type=aType;
- }
-
- public String getDatabase()
- {
- return dbname;
- }
-
- public void setDatabase(String aDB)
- {
- dbname=aDB;
- }
-
- public int getAuthType()
- {
- return authType;
- }
-
- public void setAuthType(int aType)
- {
- authType=aType;
- }
-
- public String getAuthArgs()
- {
- return authArg;
- }
-
- public void setAuthArgs(String aArg)
- {
- authArg=aArg;
- }
-
- public IPAddress getIP()
- {
- return ip;
- }
-
- public void setIP(String aArg)
- {
- setIP(new IPAddress(aArg));
- }
-
- public void setIP(IPAddress aArg)
- {
- ip=aArg;
- }
-
- public IPAddress getMask()
- {
- return mask;
- }
-
- public void setMask(String aArg)
- {
- setMask(new IPAddress(aArg));
- }
-
- public void setMask(IPAddress aArg)
- {
- mask=aArg;
- }
-
- public String toString()
- {
- StringBuffer buf = new StringBuffer();
- write(buf);
- return buf.toString();
- }
-
- public void write(StringBuffer buf)
- {
- buf.append(types[type]).append(spc);
-
- if(type==TYPE_HOST || type==TYPE_HOSTSSL) {
- buf.append(getIP()).append(spc);
- buf.append(getMask()).append(spc);
- }
-
- buf.append(auths[authType]);
-
- // Now the authArg
- switch(type)
- {
- // These have no authArgs
- case AUTH_TRUST:
- case AUTH_REJECT:
- case AUTH_KRB4:
- case AUTH_KRB5:
- break;
-
- // These must have an arg
- case AUTH_IDENT:
- buf.append(spc).append(getAuthArgs());
- break;
-
- // These may have an optional arg
- case AUTH_PASSWORD:
- case AUTH_CRYPT:
- if(!(authArg==null || authArg.equals("")))
- buf.append(spc).append(getAuthArgs());
- break;
- }
- }
-
- private static WStringTokenizer tok;
-
- public static Record parseLine(String s)
- {
- Record res = new Record();
- int type;
-
- if(s==null || s.equals("") || s.startsWith("#"))
- return null;
-
- if(tok==null)
- tok=new WStringTokenizer();
-
- tok.setString(s);
-
- type=WStringTokenizer.matchToken(types,tok.nextToken());
- res.setType(type);
-
- res.setDatabase(tok.nextToken());
-
- if(type==TYPE_HOST || type==TYPE_HOSTSSL) {
- res.setIP(new IPAddress(tok.nextToken()));
- res.setMask(new IPAddress(tok.nextToken()));
- }
-
- res.setAuthType(WStringTokenizer.matchToken(auths,tok.nextToken()));
- res.setAuthArgs(tok.nextToken());
-
- return res;
- }
-
- public static final int VALID = 0;
- public static final int INVALID_TYPE = 1;
- public static final int INVALID_IPREQUIRED = 2;
-
- /**
- * Validates the record
- */
- public int validate()
- {
- switch(type)
- {
- case TYPE_HOST:
- case TYPE_HOSTSSL:
- if(ip==null || ip.isInvalid()) {
- Logger.log(Logger.INFO,"pg_hba.conf: IP missing or invalid - repairing");
- setMask("127.0.0.1");
- }
-
- if(mask==null || mask.isInvalid() || !ip.validateMask(mask)) {
- Logger.log(Logger.INFO,"pg_hba.conf: IP address without mask - repairing");
- setMask(ip.getMask());
- }
-
- break;
-
- case TYPE_LOCAL:
- break;
-
- default:
- return INVALID_TYPE;
- }
-
- return VALID;
- }
-
- /*
-# host all 192.168.54.1 255.255.255.255 reject
-# host all 0.0.0.0 0.0.0.0 krb5
-# host all 192.168.0.0 255.255.0.0 ident omicron
-#
-
-local all trust
-host all 127.0.0.1 255.255.255.255 trust
-*/
-}
\ No newline at end of file
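A small sketch of how the Record class above could be used to parse and re-emit
one pg_hba.conf line; the sample line is taken from the comment at the end of
the class:

    import uk.org.retep.util.hba.Record;

    public class HbaDemo
    {
        public static void main(String[] args)
        {
            // parseLine() returns null for blank lines and comments
            Record r = Record.parseLine("host all 127.0.0.1 255.255.255.255 trust");

            if(r != null && r.validate() == Record.VALID) {
                System.out.println(r);   // toString() re-emits the record
            }
        }
    }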
diff --git a/contrib/retep/uk/org/retep/util/misc/IPAddress.java b/contrib/retep/uk/org/retep/util/misc/IPAddress.java
deleted file mode 100644
index a04babde3a2..00000000000
--- a/contrib/retep/uk/org/retep/util/misc/IPAddress.java
+++ /dev/null
@@ -1,125 +0,0 @@
-package uk.org.retep.util.misc;
-
-import java.util.StringTokenizer;
-
-/**
- * Represent an IP address
- * @author
- * @version 1.0
- */
-
-public class IPAddress
-{
- protected long address;
- protected long b[] = new long[4];
- protected boolean invalid=true;
-
- public IPAddress()
- {
- }
-
- public IPAddress(String s)
- {
- setAddress(s);
- }
-
- public synchronized void setAddress(String s)
- {
- if(s==null || s.equals("")) {
- invalid=true;
- return;
- }
-
- address=0;
- StringTokenizer tok = new StringTokenizer(s,".");
- int i=0;
- while(i<4 && tok.hasMoreElements()) {
- b[i++] = Long.parseLong(tok.nextToken());
- }
- while(i<4) {
- b[i++]=0;
- }
-
- invalid=false;
- refresh();
- }
-
- public void refresh()
- {
- if(invalid)
- return;
- address = (b[0]<<24) | (b[1]<<16) | (b[2]<<8) | (b[3]);
- }
-
- public boolean isInvalid()
- {
- refresh();
- return invalid;
- }
-
- public String toString()
- {
- refresh();
- if(invalid)
- return "*INVALID*";
-
- return Long.toString(b[0])+"."+Long.toString(b[1])+"."+Long.toString(b[2])+"."+Long.toString(b[3]);
- }
-
- public boolean equals(Object o)
- {
- if(o instanceof IPAddress) {
- IPAddress ip = (IPAddress) o;
-
- refresh();
- ip.refresh();
-
- if(ip.invalid == invalid)
- return false;
-
- return address==ip.address;
- }
- return false;
- }
-
- private static int gethoststart(long b)
- {
- if((b & 0x80)==0x00) return 1; // class A
- if((b & 0xc0)==0x80) return 2; // class B
- if((b & 0xe0)==0xc0) return 3; // class C
- return 4; // class D
- }
-
- public boolean validateMask(IPAddress mask)
- {
- // If were a network check the host mask
- int i=gethoststart(b[0]);
-System.out.println("Host start "+i);
- while(i<4 && b[i]==0) {
- if(mask.b[i++]>0)
- return false;
- }
-
- for(i=0;i<4;i++) {
- if((b[i]&mask.b[i])!=b[i])
- return false;
- }
-
- return true;
- }
-
- public IPAddress getMask()
- {
- IPAddress mask = new IPAddress();
- int i=3;
- while(i>-1 && b[i]==0) {
- mask.b[i--]=0;
- }
- while(i>-1) {
- mask.b[i--]=255;
- }
- mask.invalid=false;
- mask.refresh();
- return mask;
- }
-}
\ No newline at end of file
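A brief sketch of the IPAddress API above; the addresses are illustrative:

    import uk.org.retep.util.misc.IPAddress;

    public class MaskDemo
    {
        public static void main(String[] args)
        {
            IPAddress net  = new IPAddress("192.168.0.0");
            IPAddress mask = new IPAddress("255.255.0.0");

            System.out.println(net.getMask());          // default mask derived from the trailing zero bytes
            System.out.println(net.validateMask(mask)); // true if the mask fits the address
        }
    }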
diff --git a/contrib/retep/uk/org/retep/util/misc/PropertiesIO.java b/contrib/retep/uk/org/retep/util/misc/PropertiesIO.java
deleted file mode 100644
index 7bed62c4d28..00000000000
--- a/contrib/retep/uk/org/retep/util/misc/PropertiesIO.java
+++ /dev/null
@@ -1,157 +0,0 @@
-package uk.org.retep.util.misc;
-
-import java.io.*;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.Properties;
-import java.util.TreeMap;
-
-/**
- * Misc Properties utilities..
- * @author
- * @version 1.0
- */
-
-public class PropertiesIO
-{
-
- public PropertiesIO()
- {
- }
-
- /**
- * Builds a TreeMap based on the given Properties object. This is useful
- * because the keys will be in sorted order.
- */
- public static TreeMap getTreeMap(Properties p)
- {
- TreeMap map = new TreeMap();
- Iterator e = p.keySet().iterator();
- while(e.hasNext()) {
- Object k = e.next();
- map.put(k,p.get(k));
- }
- return map;
- }
-
- /**
- * Writes a Properties file to the writer. This is similar to Properties.save
- * except you can pick the key/value separator
- */
- public static synchronized void save(Properties p,OutputStream out,char sep,String header)
- throws IOException
- {
- save(p,p.keySet().iterator(),out,sep,header);
- }
-
- /**
- * Writes a Properties file to the writer. This is similar to Properties.save
- * except you can pick the key/value separator and the keys are written
- * in a sorted manner
- */
- public static synchronized void saveSorted(Properties p,OutputStream out,char sep,String header)
- throws IOException
- {
- save(p,getTreeMap(p).keySet().iterator(),out,sep,header);
- }
-
- /**
- * This is the same as save, only the keys in the enumeration are written.
- */
- public static synchronized void save(Properties p,Iterator e, OutputStream out,char sep,String header)
- throws IOException
- {
- BufferedWriter w = new BufferedWriter(new OutputStreamWriter(out, "8859_1"));
-
- if (header != null) {
- w.write('#');
- w.write(header);
- w.newLine();
- }
-
- w.write('#');
- w.write(new Date().toString());
- w.newLine();
-
- while(e.hasNext()) {
- String key = (String)e.next();
- w.write(encode(key,true));
- w.write(sep);
- w.write(encode((String)p.get(key),false));
- w.newLine();
- }
- w.flush();
- }
-
- private static final String specialSaveChars = "=: \t\r\n\f#!";
-
- /**
- * Encodes a string in a way similar to the JDK's Properties method
- */
- public static String encode(String s, boolean escapeSpace)
- {
- int l=s.length();
- StringBuffer buf = new StringBuffer(l<<1);
-
- for(int i=0;i0x7e)) {
- buf.append('\\').append('u');
- buf.append(toHex((c >> 12) & 0xF));
- buf.append(toHex((c >> 8) & 0xF));
- buf.append(toHex((c >> 4) & 0xF));
- buf.append(toHex( c & 0xF));
- } else {
- if (specialSaveChars.indexOf(c) != -1)
- buf.append('\\');
- buf.append(c);
- }
- }
- }
- return buf.toString();
- }
-
- /**
- * Convert a nibble to a hex character
- * @param nibble the nibble to convert.
- */
- public static char toHex(int n) {
- return hd[(n & 0xF)];
- }
-
- /** A table of hex digits */
- private static final char[] hd = {
- '0','1','2','3','4','5','6','7',
- '8','9','A','B','C','D','E','F'
- };
-}
\ No newline at end of file
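A short sketch of the PropertiesIO save helpers above; the file name and
contents are illustrative:

    import uk.org.retep.util.misc.PropertiesIO;

    import java.io.FileOutputStream;
    import java.util.Properties;

    public class SaveDemo
    {
        public static void main(String[] args) throws Exception
        {
            Properties p = new Properties();
            p.put("greeting", "hello world");

            FileOutputStream out = new FileOutputStream("demo.properties");
            // '=' is the key/value separator; keys are written in sorted order
            PropertiesIO.saveSorted(p, out, '=', "Written by SaveDemo");
            out.close();
        }
    }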
diff --git a/contrib/retep/uk/org/retep/util/misc/WStringTokenizer.java b/contrib/retep/uk/org/retep/util/misc/WStringTokenizer.java
deleted file mode 100644
index 763676cfb3b..00000000000
--- a/contrib/retep/uk/org/retep/util/misc/WStringTokenizer.java
+++ /dev/null
@@ -1,102 +0,0 @@
-package uk.org.retep.util.misc;
-
-/**
- * Similar to StringTokenizer, but handles whitespace and multiple delimiters
- * between tokens. It also handles quotes.
- *
- * @author
- * @version 1.0
- */
-
-public class WStringTokenizer
-{
- String string;
- int pos,len;
-
- /**
- * Constructor
- */
- public WStringTokenizer()
- {
- }
-
- /**
- * Constructor: set the initial string
- * @param aString String to tokenise
- */
- public WStringTokenizer(String aString)
- {
- setString(aString);
- }
-
- /**
- * @param aString String to tokenise
- */
- public void setString(String aString)
- {
- string=aString;
- pos=0;
- len=string.length();
- }
-
- /**
- * @return true if more tokens may be possible
- */
- public boolean hasMoreTokens()
- {
- return !(string==null || pos==len);
- }
-
- /**
- * @return next token, null if complete.
- */
- public String nextToken()
- {
- char c;
- boolean q=false;
-
- if(!hasMoreTokens())
- return null;
-
- // find start of token
- while(pos=keys.length || aColumn<0 || aColumn>=cols.length)
- return null;
-
- Object key = keys[aRow];
-
- switch(aColumn)
- {
- case 0:
- return key;
-
- case 1:
- return properties.get(key);
-
- default:
- return null;
- }
- }
-
- public int getRowCount()
- {
- return keys.length;
- }
-
- public String getColumnName(int aColumn)
- {
- return cols[aColumn];
- }
-
- public void setValueAt(Object aValue, int aRow, int aColumn)
- {
- if(aRow<0 || aRow>=keys.length || aColumn<0 || aColumn>=cols.length)
- return;
-
- switch(aColumn)
- {
- // Rename the key (only if not already present). If already present
- // the refresh() will replace with the old one anyhow...
- case 0:
- if(!properties.containsKey(aValue)) {
- Object oldValue = get(keys[aRow]);
- remove(keys[aRow]);
- put(aValue,oldValue);
- }
- refresh();
- break;
-
- // Update the value...
- case 1:
- put(keys[aRow],aValue);
- //refresh();
- break;
-
- default:
- // Should never be called
- Logger.log(Logger.ERROR,"PropertiesTableModel: Column range",aColumn);
- }
- }
-
- public boolean isCellEditable(int aRow, int aColumn)
- {
- return true;
- }
-
-}
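A tiny sketch of WStringTokenizer; per the class comment, runs of whitespace
between tokens should count as a single separator:

    import uk.org.retep.util.misc.WStringTokenizer;

    public class TokenDemo
    {
        public static void main(String[] args)
        {
            WStringTokenizer tok = new WStringTokenizer("host   all    127.0.0.1");

            while(tok.hasMoreTokens()) {
                System.out.println(tok.nextToken());   // expected: "host", "all", "127.0.0.1"
            }
        }
    }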
diff --git a/contrib/retep/uk/org/retep/util/proped/Main.java b/contrib/retep/uk/org/retep/util/proped/Main.java
deleted file mode 100644
index 6f2c73bc68f..00000000000
--- a/contrib/retep/uk/org/retep/util/proped/Main.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package uk.org.retep.util.proped;
-
-import uk.org.retep.util.ExceptionDialog;
-import uk.org.retep.util.Globals;
-import uk.org.retep.util.Logger;
-import uk.org.retep.util.StandaloneApp;
-
-import java.io.IOException;
-import java.util.Iterator;
-import javax.swing.JComponent;
-
-/**
- * Standalone entry point for the Properties editor
- *
- * $Id: Main.java,v 1.1 2001/03/05 09:15:38 peter Exp $
- */
-
-public class Main extends StandaloneApp
-{
- public Main(String[] args)
- throws Exception
- {
- super(args);
- }
-
- public JComponent init()
- throws Exception
- {
- Globals globals = Globals.getInstance();
-
- PropertyEditor panel = new PropertyEditor();
-
- // Only handle 1 open at a time in standalone mode
- if(globals.getArgumentCount()>0) {
- try {
- panel.openFile(globals.getArgument(0));
- } catch(IOException ioe) {
- ExceptionDialog.displayException(ioe,"while loading "+globals.getArgument(0));
- throw (Exception) ioe.fillInStackTrace();
- }
- }
-
- return panel;
- }
-
- public static void main(String[] args)
- throws Exception
- {
- Main main = new Main(args);
- main.pack();
- main.setVisible(true);
- }
-}
\ No newline at end of file
diff --git a/contrib/retep/uk/org/retep/util/proped/PropertyEditor.java b/contrib/retep/uk/org/retep/util/proped/PropertyEditor.java
deleted file mode 100644
index b5c19e10876..00000000000
--- a/contrib/retep/uk/org/retep/util/proped/PropertyEditor.java
+++ /dev/null
@@ -1,381 +0,0 @@
-package uk.org.retep.util.proped;
-
-import uk.org.retep.util.ExceptionDialog;
-import uk.org.retep.util.misc.PropertiesIO;
-import uk.org.retep.util.models.PropertiesTableModel;
-
-import java.awt.*;
-import java.io.*;
-import java.util.*;
-import javax.swing.*;
-import java.awt.event.*;
-
-/**
- * A property file editor
- *
- * $Id: PropertyEditor.java,v 1.1 2001/03/05 09:15:38 peter Exp $
- *
- * @author
- * @version 1.0
- */
-
-public class PropertyEditor
-extends JPanel
-implements uk.org.retep.tools.Tool
-{
- BorderLayout borderLayout1 = new BorderLayout();
-
- // The filename, null if not set
- String filename;
- File file;
-
- JScrollPane jScrollPane1 = new JScrollPane();
- JTable contentTable = new JTable();
-
- PropertiesTableModel model = new PropertiesTableModel();
-
- boolean standaloneMode;
-
- private static final String TITLE_PREFIX = "Retep PropertyEditor";
- JPopupMenu popupMenu = new JPopupMenu();
- JMenuItem newPopupItem = new JMenuItem();
- JMenuItem dupPopupItem = new JMenuItem();
- JMenuItem delPopupItem = new JMenuItem();
- JMenuBar menuBar = new JMenuBar();
- JMenu jMenu1 = new JMenu();
- JMenuItem jMenuItem4 = new JMenuItem();
- JMenuItem jMenuItem5 = new JMenuItem();
- JMenuItem jMenuItem6 = new JMenuItem();
- JMenuItem jMenuItem7 = new JMenuItem();
- JMenuItem jMenuItem8 = new JMenuItem();
- JMenuItem closeMenuItem = new JMenuItem();
-
- public PropertyEditor()
- {
- try
- {
- jbInit();
- }
- catch(Exception ex)
- {
- ex.printStackTrace();
- }
- }
-
- /**
- * @return the default menubar
- */
- public JMenuBar getMenuBar()
- {
- return menuBar;
- }
-
- /**
- * @return the File menu
- */
- public JMenu getMenu()
- {
- return jMenu1;
- }
-
- /**
- * @return the recomended title string for the parent JFrame/JInternalFrame
- */
- public String getTitle()
- {
- if(filename==null) {
- return TITLE_PREFIX;
- }
- return TITLE_PREFIX+": "+filename;
- }
-
- /**
- * Sets menus up to Standalone mode
- */
- public void setStandaloneMode(boolean aMode)
- {
- standaloneMode=aMode;
- if(aMode) {
- closeMenuItem.setText("Exit");
- } else {
- closeMenuItem.setText("Close");
- }
- }
-
- public boolean isStandalone()
- {
- return standaloneMode;
- }
-
- public void openFile(String aFile)
- throws IOException
- {
- openFile(new File(aFile));
- }
-
- public void openFile(File aFile)
- throws IOException
- {
- FileInputStream fis = new FileInputStream(aFile);
- Properties p = new Properties();
- p.load(fis);
- fis.close();
- model.setProperties(p);
-
- file=aFile;
- filename = aFile.getAbsolutePath();
- }
-
- public void saveFile(File aFile)
- throws IOException
- {
- FileOutputStream fis = new FileOutputStream(aFile);
- PropertiesIO.save(model.getProperties(),fis,'=',"Written by "+TITLE_PREFIX);
- fis.close();
-
- filename = aFile.getAbsolutePath();
- file = aFile;
- }
-
- void jbInit() throws Exception
- {
- this.setLayout(borderLayout1);
- contentTable.setToolTipText("");
- contentTable.setAutoResizeMode(JTable.AUTO_RESIZE_LAST_COLUMN);
- contentTable.setModel(model);
- contentTable.addMouseListener(new java.awt.event.MouseAdapter()
- {
- public void mouseClicked(MouseEvent e)
- {
- contentTable_mouseClicked(e);
- }
- public void mouseReleased(MouseEvent e)
- {
- contentTable_mouseReleased(e);
- }
- });
- newPopupItem.setText("New");
- newPopupItem.addActionListener(new java.awt.event.ActionListener()
- {
- public void actionPerformed(ActionEvent e)
- {
- newPopupItem_actionPerformed(e);
- }
- });
- dupPopupItem.setText("Duplicate");
- dupPopupItem.setAccelerator(javax.swing.KeyStroke.getKeyStroke(67, java.awt.event.KeyEvent.CTRL_MASK, false));
- dupPopupItem.addActionListener(new java.awt.event.ActionListener()
- {
- public void actionPerformed(ActionEvent e)
- {
- dupPopupItem_actionPerformed(e);
- }
- });
- delPopupItem.setText("Delete");
- delPopupItem.setAccelerator(javax.swing.KeyStroke.getKeyStroke(68, java.awt.event.KeyEvent.CTRL_MASK, false));
- delPopupItem.addActionListener(new java.awt.event.ActionListener()
- {
- public void actionPerformed(ActionEvent e)
- {
- delPopupItem_actionPerformed(e);
- }
- });
- jMenu1.setText("File");
- jMenuItem4.setText("Open");
- jMenuItem4.addActionListener(new java.awt.event.ActionListener()
- {
- public void actionPerformed(ActionEvent e)
- {
- jMenuItem4_actionPerformed(e);
- }
- });
- jMenuItem5.setText("Save");
- jMenuItem5.addActionListener(new java.awt.event.ActionListener()
- {
- public void actionPerformed(ActionEvent e)
- {
- jMenuItem5_actionPerformed(e);
- }
- });
- jMenuItem6.setText("Save As");
- jMenuItem6.addActionListener(new java.awt.event.ActionListener()
- {
- public void actionPerformed(ActionEvent e)
- {
- jMenuItem6_actionPerformed(e);
- }
- });
- jMenuItem7.setText("Revert");
- jMenuItem7.addActionListener(new java.awt.event.ActionListener()
- {
- public void actionPerformed(ActionEvent e)
- {
- jMenuItem7_actionPerformed(e);
- }
- });
- jMenuItem8.setText("Print");
- closeMenuItem.setText("Close");
- closeMenuItem.addActionListener(new java.awt.event.ActionListener()
- {
- public void actionPerformed(ActionEvent e)
- {
- closeMenuItem_actionPerformed(e);
- }
- });
- jMenu2.setText("Edit");
- jMenuItem1.setText("New");
- jMenuItem1.setAccelerator(javax.swing.KeyStroke.getKeyStroke(78, java.awt.event.KeyEvent.CTRL_MASK, false));
- jMenuItem1.addActionListener(new java.awt.event.ActionListener()
- {
- public void actionPerformed(ActionEvent e)
- {
- newPopupItem_actionPerformed(e);
- }
- });
- jMenuItem2.setText("Duplicate");
- jMenuItem3.setText("Delete");
- this.add(jScrollPane1, BorderLayout.CENTER);
- jScrollPane1.getViewport().add(contentTable, null);
- popupMenu.add(newPopupItem);
- popupMenu.add(dupPopupItem);
- popupMenu.add(delPopupItem);
- menuBar.add(jMenu1);
- menuBar.add(jMenu2);
- jMenu1.add(jMenuItem4);
- jMenu1.add(jMenuItem5);
- jMenu1.add(jMenuItem6);
- jMenu1.add(jMenuItem7);
- jMenu1.addSeparator();
- jMenu1.add(jMenuItem8);
- jMenu1.addSeparator();
- jMenu1.add(closeMenuItem);
- jMenu2.add(jMenuItem1);
- jMenu2.add(jMenuItem2);
- jMenu2.add(jMenuItem3);
- }
-
- Point popupPoint = new Point();
- JMenu jMenu2 = new JMenu();
- JMenuItem jMenuItem1 = new JMenuItem();
- JMenuItem jMenuItem2 = new JMenuItem();
- JMenuItem jMenuItem3 = new JMenuItem();
- void contentTable_mouseClicked(MouseEvent e)
- {
- if(e.isPopupTrigger()) {
- popupPoint.setLocation(e.getX(),e.getY());
- popupMenu.show(contentTable,e.getX(),e.getY());
- }
- }
-
- void contentTable_mouseReleased(MouseEvent e)
- {
- contentTable_mouseClicked(e);
- }
-
- void jMenuItem4_actionPerformed(ActionEvent e)
- {
- JFileChooser fc = new JFileChooser();
- if(fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) {
- try {
- openFile(fc.getSelectedFile());
- } catch(IOException ioe) {
- ExceptionDialog.displayException(ioe);
- }
- }
- }
-
- void closeMenuItem_actionPerformed(ActionEvent e)
- {
- if(standaloneMode) {
- System.exit(0);
- } else {
- filename="";
- file=null;
- model.setProperties(new Properties());
- }
- }
-
- void newPopupItem_actionPerformed(ActionEvent e)
- {
- int y = contentTable.rowAtPoint(popupPoint);
-
- // create a new unique key based on the current one
- String key=(String) model.getValueAt(y,0);
-
- if(key==null) {
- key="new-key";
- }
-
- int uid=1;
- while(model.containsKey(key+uid)) {
- uid++;
- }
-
- key=key+uid;
- model.put(key,"");
- contentTable.clearSelection();
- }
-
- void dupPopupItem_actionPerformed(ActionEvent e)
- {
- int y = contentTable.rowAtPoint(popupPoint);
-
- // create a new unique key based on the current one
- String key=(String) model.getValueAt(y,0);
- Object val=model.get(key);
-
- int uid=1;
- while(model.containsKey(key+uid)) {
- uid++;
- }
-
- key=key+uid;
- model.put(key,val);
- contentTable.clearSelection();
- }
-
- void delPopupItem_actionPerformed(ActionEvent e)
- {
- int y = contentTable.rowAtPoint(popupPoint);
- model.remove(model.getValueAt(y,0));
- }
-
- void jMenuItem6_actionPerformed(ActionEvent e)
- {
- JFileChooser fc = new JFileChooser();
- if(fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION) {
- try {
- saveFile(fc.getSelectedFile());
- } catch(IOException ioe) {
- ExceptionDialog.displayException(ioe);
- }
- }
- }
-
- void jMenuItem5_actionPerformed(ActionEvent e)
- {
- if(filename==null) {
- jMenuItem6_actionPerformed(e);
- } else {
- try {
- saveFile(file);
- } catch(IOException ioe) {
- ExceptionDialog.displayException(ioe);
- }
- }
- }
-
- void jMenuItem7_actionPerformed(ActionEvent e)
- {
- // add check here
- if(file!=null) {
- try {
- openFile(file);
- } catch(IOException ioe) {
- ExceptionDialog.displayException(ioe);
- }
- } else {
- jMenuItem4_actionPerformed(e);
- }
- }
-}
\ No newline at end of file
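Because PropertyEditor extends JPanel and implements the Tool interface, it can
also be embedded directly rather than launched via its Main class. A hedged
sketch; the frame wiring and file name are assumptions, not taken from this code:

    import uk.org.retep.util.proped.PropertyEditor;

    import javax.swing.JFrame;

    public class EmbedDemo
    {
        public static void main(String[] args) throws Exception
        {
            PropertyEditor editor = new PropertyEditor();
            editor.openFile("demo.properties");        // illustrative file name

            JFrame frame = new JFrame(editor.getTitle());
            frame.setJMenuBar(editor.getMenuBar());    // the editor supplies its own menu bar
            frame.getContentPane().add(editor);
            frame.pack();
            frame.setVisible(true);
        }
    }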
diff --git a/contrib/retep/uk/org/retep/xml/core/XMLFactory.java b/contrib/retep/uk/org/retep/xml/core/XMLFactory.java
deleted file mode 100644
index 09565b00110..00000000000
--- a/contrib/retep/uk/org/retep/xml/core/XMLFactory.java
+++ /dev/null
@@ -1,334 +0,0 @@
-package uk.org.retep.xml.core;
-
-import java.io.IOException;
-import java.io.Writer;
-
-/**
- * An XMLFactory is used to render XML Tags, accounting for nesting etc
- */
-public class XMLFactory
-{
- /**
- * The lest level (ie, how many tags down the tree we are)
- */
- protected int level;
-
- /**
- * The size of our tag name cache
- */
- protected int maxlevel;
-
- /**
- * Our tag name cache
- */
- protected String[] names;
-
- /**
- * Used to keep track of how formatting is done
- */
- protected boolean hascontent;
- protected boolean[] contbuf;
-
- /**
- * Scratch used by nest()
- */
- private char[] nestbuf;
-
- /**
- * The destination Writer
- */
- protected Writer out;
-
- /**
- * True if we are still within a tag
- */
- protected boolean inTag;
-
- /**
- * True if we have just created a tag so parameters are valid
- */
- protected boolean inArg;
-
- /**
- * Constructs an XMLFactory with no output Writer
- */
- public XMLFactory()
- {
- this(10);
- }
-
- /**
- * Constructs an XMLFactory with no output Writer
- * @param m Expected number of leaves in the XML Tree
- */
- public XMLFactory(int m)
- {
- // Initialise the names cache
- level=0;
- maxlevel=m;
- names=new String[maxlevel];
- contbuf=new boolean[maxlevel];
-
- // This is used by nest()
- nestbuf=new char[maxlevel];
- for(int i=0;i\n");
- }
-
- /**
- * @return Writer the XML is being sent out on.
- */
- public Writer getWriter() {
- return out;
- }
-
- /**
- * This starts a tag
- * @param name The tag name
- */
- public void startTag(String name)
- throws IOException
- {
- if(inTag && inArg) {
- // Handles two startTag() calls in succession.
- out.write(">");
- }
-
- nest(level);
- out.write('<');
- out.write(name);
- inTag=true;
- inArg=true;
-
- // cache the current tag name
- names[level]=name;
-
- // cache the current hascontent value & reset
- contbuf[level]=hascontent;
- hascontent=false;
-
- // increase the level and the cache's as necessary
- level++;
- if(level>maxlevel) {
- maxlevel=maxlevel+10;
-
- String n[]=new String[maxlevel];
- System.arraycopy(names,0,n,0,level);
- names=n;
-
- boolean b[] = new boolean[maxlevel];
- System.arraycopy(contbuf,0,b,0,level);
- contbuf=b;
- }
- }
-
- /**
- * This ends a tag
- */
- public void endTag()
- throws IOException, XMLFactoryException
- {
- if(level<1)
- throw new XMLFactoryException("endTag called above root node");
-
- level--;
-
- if(inArg) {
- // We are still within the opening tag
- out.write(" />");
- } else {
- // We must have written some content or child tags
-
- // hascontent is true if addContent() was called. If it was never called
- // to get here some child tags must have been written, so we call nest()
- // so that the close tag is on it's own line, and everything looks neat
- // and tidy.
- if(!hascontent)
- nest(level);
-
- out.write("</");
- out.write(names[level]);
- out.write('>');
- }
-
- inArg=false; // The parent tag must be told it now has content
- inTag= level>0; // Are we still in a tag?
- hascontent=contbuf[level]; // retrieve this level's hascontent value
- }
-
- /**
- * This completes the document releasing any open resources.
- */
- public void close()
- throws IOException, XMLFactoryException
- {
- while(level>0)
- endTag();
- out.write('\n');
- out.flush();
- }
-
- /**
- * This writes an attribute to the current tag. If the value is null, then no action is taken.
- * @param name Name of the parameter
- * @param value Value of the parameter
- * @throw XMLFactoryException if out of context
- */
- public void addAttribute(String name,Object value)
- throws IOException, XMLFactoryException
- {
- if(value==null)
- return;
-
- if(inArg) {
- out.write(' ');
- out.write(name);
- out.write("=\"");
- out.write(encode(value.toString()));
- out.write("\"");
- } else
- throw new XMLFactoryException("Cannot add attribute outside of a tag");
- }
-
- /**
- * This writes some content to the current tag. Once this has been called,
- * you cannot add any more attributes to the current tag. Note, if c is null,
- * no action is taken.
- * @param c content to add.
- */
- public void addContent(Object c)
- throws IOException, XMLFactoryException
- {
- if(c==null)
- return;
-
- if(inTag) {
- if(inArg) {
- // close the open tag
- out.write('>');
- inArg=false;
- }
- out.write(c.toString());
-
- // This is used by endTag()
- hascontent=true;
- } else
- throw new XMLFactoryException("Cannot add content outside of a tag");
- }
-
- /**
- * This adds a comment to the XML file. This is normally used at the start of
- * any XML output.
- * @param c Comment to include
- */
- public void addComment(Object c)
- throws IOException, XMLFactoryException
- {
- if(inTag)
- throw new XMLFactoryException("Cannot add comments within a tag");
-
- out.write("<!-- "+c+" -->\n");
- }
-
- /**
- * Indents the output according to the level
- * @param level The indent level to generate
- */
- protected void nest(int level)
- throws IOException
- {
- out.write('\n');
- while(level>nestbuf.length) {
- out.write(nestbuf,0,nestbuf.length);
- level-=nestbuf.length;
- }
- out.write(nestbuf,0,level);
- }
-
- /**
- * Encodes the string so that any XML tag chars are translated
- */
- protected String encode(String s) {
- return s;
- }
-
-}
\ No newline at end of file
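A minimal sketch of driving XMLFactory by hand. The Writer-taking constructor is
the one used by DModuleXML.main() earlier in this diff; the element names and
file name are illustrative:

    import uk.org.retep.xml.core.XMLFactory;

    import java.io.FileWriter;

    public class XMLDemo
    {
        public static void main(String[] args) throws Exception
        {
            FileWriter fw = new FileWriter("demo.xml");
            XMLFactory xml = new XMLFactory(fw);

            xml.startTag("CD");
            xml.addAttribute("TITLE", "Example Album");  // attributes must come before any content
            xml.startTag("TRACK");
            xml.addContent("Example Song");
            xml.endTag();                                // closes TRACK
            xml.close();                                 // closes remaining open tags and flushes
            fw.close();
        }
    }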
diff --git a/contrib/retep/uk/org/retep/xml/core/XMLFactoryException.java b/contrib/retep/uk/org/retep/xml/core/XMLFactoryException.java
deleted file mode 100644
index 5f9d4972097..00000000000
--- a/contrib/retep/uk/org/retep/xml/core/XMLFactoryException.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package uk.org.retep.xml.core;
-
-/**
- * Title:
- * Description:
- * Copyright: Copyright (c) 2001
- * Company:
- * @author
- * @version 1.0
- */
-
-public class XMLFactoryException extends Exception
-{
-
- public XMLFactoryException(String s)
- {
- super(s);
- }
-}
\ No newline at end of file
diff --git a/contrib/retep/uk/org/retep/xml/jdbc/XMLDatabase.java b/contrib/retep/uk/org/retep/xml/jdbc/XMLDatabase.java
deleted file mode 100644
index 50aaaa1d905..00000000000
--- a/contrib/retep/uk/org/retep/xml/jdbc/XMLDatabase.java
+++ /dev/null
@@ -1,237 +0,0 @@
-package uk.org.retep.xml.jdbc;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import uk.org.retep.xml.core.XMLFactory;
-import uk.org.retep.xml.core.XMLFactoryException;
-
-public class XMLDatabase
-{
- /**
- * The XMLFactory being used by this instance
- */
- protected XMLFactory factory;
-
- /**
- * Constructor. setXMLFactory() must be called if this constructor is used.
- */
- public XMLDatabase()
- {
- }
-
- /**
- * Constructor
- * @param fac XMLFactory to use
- */
- public XMLDatabase(XMLFactory fac)
- {
- this();
- setXMLFactory(fac);
- }
-
- /**
- * Sets the factory to use.
- * @param factory XMLFactory to use
- */
- public void setXMLFactory(XMLFactory factory)
- {
- this.factory=factory;
- }
-
- /**
- * @return the XMLFactory being used.
- */
- public XMLFactory getXMLFactory()
- {
- return factory;
- }
-
- /**
- * Flushes all output to the Writer.
- * @throw IOException from Writer
- * @throw XMLFactoryException from XMLFactory
- */
- public void close()
- throws IOException, XMLFactoryException
- {
- factory.close();
- }
-
- /**
- * writes the schema of a table.
- * @param con Connection to database
- * @param table Table name
- * @throw IOException from Writer
- * @throw SQLException from JDBC
- * @throw XMLFactoryException from XMLFactory
- */
- public void writeTable(Connection con,String table)
- throws IOException,SQLException,XMLFactoryException
- {
- writeTable(con.getMetaData(),table);
- }
-
- /**
- * writes the schema of a table.
- * @param db DatabaseMetaData for the database
- * @param table Table name
- * @throw IOException from Writer
- * @throw SQLException from JDBC
- * @throw XMLFactoryException from XMLFactory
- */
- public void writeTable(DatabaseMetaData db,String table)
- throws IOException,SQLException,XMLFactoryException
- {
- writeTable(db,null,null,table);
- }
-
- /**
- * writes the schema of a table.
- * @param db DatabaseMetaData for the database
- * @param cat Catalog (may be null)
- * @param schem Schema (may be null)
- * @param table Table name
- * @throw IOException from Writer
- * @throw SQLException from JDBC
- * @throw XMLFactoryException from XMLFactory
- */
- public void writeTable(DatabaseMetaData db,String cat,String schem,String table)
- throws IOException,SQLException,XMLFactoryException
- {
- ResultSet trs;
-
- factory.startTag("TABLE");
- factory.addAttribute("NAME",table);
- // fetch the remarks for this table (if any)
- trs = db.getTables(null,null,table,null);
- if(trs!=null) {
- if(trs.next()) {
- String rem = trs.getString(5);
- if(rem!=null)
- factory.addContent(rem);
- }
- trs.close();
- }
-
- trs = db.getColumns(null,null,table,"%");
- if(trs!=null) {
- while(trs.next()) {
- factory.startTag("COLUMN");
- factory.addAttribute("NAME",trs.getString(4));
- factory.addAttribute("TYPE",trs.getString(6));
- factory.addAttribute("COLUMN_SIZE",trs.getString(7));
- factory.addAttribute("DECIMAL_DIGITS",trs.getString(9));
- factory.addAttribute("NUM_PREC_RADIX",trs.getString(10));
- factory.addAttribute("NULLABLE",trs.getString(11));
- factory.addAttribute("COLUMN_DEF",trs.getString(13));
- factory.addAttribute("CHAR_OCTET_LENGTH",trs.getString(16));
- factory.addAttribute("ORDINAL_POSITION",trs.getString(17));
- factory.addAttribute("IS_NULLABLE",trs.getString(18));
- factory.addAttribute("TABLE_CAT",trs.getString(1));
- factory.addAttribute("TABLE_SCHEM",trs.getString(2));
- String rem = trs.getString(12);
- if(rem!=null)
- factory.addContent(rem);
- factory.endTag();
- }
- trs.close();
- }
-
- factory.endTag();
- }
-
- /**
- * This generates the schema of an entire database.
- * @param db Connection to database
- * @param table Table pattern
- * @throw IOException from Writer
- * @throw SQLException from JDBC
- * @throw XMLFactoryException from XMLFactory
- * @see java.sql.DatabaseMetaData.getTables()
- */
- public void writeDatabase(Connection db,String table)
- throws IOException, SQLException, XMLFactoryException
- {
- writeDatabase(db.getMetaData(),null,null,table);
- }
-
- /**
- * This generates the schema of an entire database.
- * @param db DatabaseMetaData of database
- * @param table Table pattern
- * @throw IOException from Writer
- * @throw SQLException from JDBC
- * @throw XMLFactoryException from XMLFactory
- * @see java.sql.DatabaseMetaData.getTables()
- */
- public void writeDatabase(DatabaseMetaData db,String table)
- throws IOException, SQLException, XMLFactoryException
- {
- writeDatabase(db,null,null,table);
- }
-
- /**
- * This generates the schema of an entire database.
- * @param db Connection to database
- * @throw IOException from Writer
- * @throw SQLException from JDBC
- * @throw XMLFactoryException from XMLFactory
- * @see java.sql.DatabaseMetaData.getTables()
- */
- public void writeDatabase(Connection db)
- throws IOException, SQLException, XMLFactoryException
- {
- writeDatabase(db.getMetaData(),null,null,"%");
- }
-
- /**
- * This generates the schema of an entire database.
- * @param db DatabaseMetaData of database
- * @throw IOException from Writer
- * @throw SQLException from JDBC
- * @throw XMLFactoryException from XMLFactory
- * @see java.sql.DatabaseMetaData.getTables()
- */
- public void writeDatabase(DatabaseMetaData db)
- throws IOException, SQLException, XMLFactoryException
- {
- writeDatabase(db,null,null,"%");
- }
-
- /**
- * This generates the schema of an entire database.
- * @param db DatabaseMetaData of database
- * @param cat Catalog (may be null)
- * @param schem Schema (may be null)
- * @param table Table pattern
- * @throw IOException from Writer
- * @throw SQLException from JDBC
- * @throw XMLFactoryException from XMLFactory
- * @see java.sql.DatabaseMetaData.getTables()
- */
- public void writeDatabase(DatabaseMetaData db,String cat,String schem,String table)
- throws IOException, SQLException, XMLFactoryException
- {
- ResultSet rs = db.getTables(cat,schem,table,null);
- if(rs!=null) {
- factory.startTag("DATABASE");
- factory.addAttribute("PRODUCT",db.getDatabaseProductName());
- factory.addAttribute("VERSION",db.getDatabaseProductVersion());
-
- while(rs.next()) {
- writeTable(db,rs.getString(1),rs.getString(2),rs.getString(3));
- }
-
- factory.endTag();
- rs.close();
- }
- }
-
-}
\ No newline at end of file
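A hedged sketch of how the XMLDatabase methods above might be driven; the JDBC URL, credentials and output file name are placeholders, not values taken from this code.

    import java.io.FileWriter;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import uk.org.retep.xml.core.XMLFactory;
    import uk.org.retep.xml.jdbc.XMLDatabase;

    public class SchemaDumpExample
    {
        public static void main(String[] args) throws Exception
        {
            Class.forName("org.postgresql.Driver");
            Connection con = DriverManager.getConnection(
                "jdbc:postgresql:template1", "user", "password");

            XMLFactory factory = new XMLFactory();
            factory.setWriter(new FileWriter("schema.xml"));

            XMLDatabase xdb = new XMLDatabase(factory);
            xdb.writeDatabase(con);   // one DATABASE tag wrapping a TABLE tag per table
            xdb.close();              // flushes the factory and its Writer

            con.close();
        }
    }

writeTable(con, "mytable") could be used instead to dump the columns of a single table.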
diff --git a/contrib/retep/uk/org/retep/xml/jdbc/XMLResultSet.java b/contrib/retep/uk/org/retep/xml/jdbc/XMLResultSet.java
deleted file mode 100644
index ee020df940d..00000000000
--- a/contrib/retep/uk/org/retep/xml/jdbc/XMLResultSet.java
+++ /dev/null
@@ -1,505 +0,0 @@
-package uk.org.retep.xml.jdbc;
-
-import java.io.IOException;
-import java.io.Writer;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.util.Properties;
-import uk.org.retep.xml.core.XMLFactory;
-import uk.org.retep.xml.core.XMLFactoryException;
-
-/**
- * This class takes a java.sql.ResultSet object and generates an XML stream
- * based on its contents.
- *
- * $Id: XMLResultSet.java,v 1.1 2001/01/23 10:22:20 peter Exp $
- */
-public class XMLResultSet
-{
- /**
- * The current ResultSet to process
- */
- protected ResultSet rs;
-
- /**
- * The XMLFactory being used by this instance
- */
- protected XMLFactory factory;
-
- /**
- * The default properties used when none are supplied by the user
- */
- protected static Properties defaults;
-
- /**
- * The default property name for defining the tag name used to define a
- * ResultSet
- */
- public static String RESULTSET_NAME = "resultset.name";
-
- /**
- * The default tag name for a resultset
- */
- public static String DEFAULT_RESULTSET_NAME = "RESULTSET";
-
- /**
- * The default property name for defining the tag name used to define a row
- */
- public static String ROW_NAME = "row.name";
-
- /**
- * The default tag name for a row
- */
- public static String DEFAULT_ROW_NAME = "RECORD";
-
- /**
- * The property name suffix used to define a column's attribute name
- */
- public static String COLNAME = ".name";
-
- /**
- * The value of the property (named as its related column) used to define
- * how the column is generated. This indicates that the column's data is
- * enclosed within a pair of tags, ie: <id>1234</id>
- */
- public static String CONTENT = "content";
-
- /**
- * The value of the property (named as its related column) used to define
- * how the column is generated. This indicates that the column's data is
- * an attribute in the column's tag. ie:
- */
- public static String ATTRIBUTE = "attribute";
-
- /**
- * This is the default attribute name used when the ATTRIBUTE option is set.
- */
- public static String DEFAULT_ATTRIBUTE = "VALUE";
-
- /**
- * The value of the property (named as its related column) used to define
- * how the column is generated. This indicates that the column's data is
- * an attribute in the parent's tag. ie:
- */
- public static String ROW_ATTRIBUTE = "row";
-
- /**
- * This property name marks the beginning row number within the ResultSet to
- * start processing.
- */
- public static String FIRST_ROW = "row.first";
-
- /**
- * This property name marks the last row number within the ResultSet to
- * end processing.
- */
- public static String LAST_ROW = "row.last";
-
- /**
- * Constructor
- */
- public XMLResultSet()
- {
- factory = new XMLFactory();
- }
-
- /**
- * Constructor
- */
- public XMLResultSet(ResultSet rs)
- {
- this();
- setResultSet(rs);
- }
-
- /**
- * Sets the ResultSet to use
- * @param rs ResultSet
- */
- public void setResultSet(ResultSet rs)
- {
- this.rs=rs;
- }
-
- /**
- * @return the current ResultSet
- *
- */
- public ResultSet getResultSet()
- {
- return rs;
- }
-
- /**
- * Sets the Writer to send all output to
- * @param out Writer
- * @throws IOException from XMLFactory
- * @see XMLFactory.setWriter
- */
- public void setWriter(Writer out)
- throws IOException
- {
- factory.setWriter(out);
- }
-
- /**
- * @return Writer output is going to
- */
- public Writer getWriter()
- {
- return factory.getWriter();
- }
-
- /**
- * @return XMLFactory being used
- */
- public XMLFactory getXMLFactory()
- {
- return factory;
- }
-
- /**
- * Flushes all output to the Writer
- * @throw IOException from Writer
- * @throw XMLFactoryException from XMLFactory
- */
- public void close()
- throws IOException, XMLFactoryException
- {
- factory.close();
- }
-
- /**
- * Returns the default properties used by translate() and buildDTD()
- * @return Properties default property settings
- */
- public static Properties getDefaultProperties()
- {
- if(defaults==null) {
- defaults=new Properties();
- defaults.setProperty(RESULTSET_NAME,DEFAULT_RESULTSET_NAME);
- defaults.setProperty(ROW_NAME,DEFAULT_ROW_NAME);
- }
- return defaults;
- }
-
- /**
- * This generates an XML version of a ResultSet sending it to the supplied
- * Writer.
- * @param rs ResultSet to convert
- * @param p Properties for the conversion
- * @param out Writer to send output to (replaces existing one)
- * @throws XMLFactoryException from XMLFactory
- * @throws IOException from Writer
- * @throws SQLException from ResultSet
- */
- public void translate(ResultSet rs,Properties p,Writer out)
- throws XMLFactoryException, IOException, SQLException
- {
- factory.setWriter(out);
- translate(rs,p);
- }
-
- /**
- * This generates an XML version of a ResultSet sending it to the supplied
- * Writer using a default tag structure
- * @param rs ResultSet to convert
- * @param out Writer to send output to (replaces existing one)
- * @throws XMLFactoryException from XMLFactory
- * @throws IOException from Writer
- * @throws SQLException from ResultSet
- */
- public void translate(ResultSet rs,Writer out)
- throws XMLFactoryException, IOException, SQLException
- {
- factory.setWriter(out);
- translate(rs,(Properties)null);
- }
-
- /**
- * This generates an XML version of a ResultSet sending it to the current
- * output stream using a default tag structure.
- * @param rs ResultSet to convert
- * @throws XMLFactoryException from XMLFactory
- * @throws IOException from Writer
- * @throws SQLException from ResultSet
- */
- public void translate(ResultSet rs)
- throws XMLFactoryException, IOException, SQLException
- {
- translate(rs,(Properties)null);
- }
-
- /**
- * This generates an XML version of a ResultSet sending it to the current
- * output stream.
- * @param rs ResultSet to convert
- * @param p Properties for the conversion
- * @throws XMLFactoryException from XMLFactory
- * @throws IOException from Writer
- * @throws SQLException from ResultSet
- */
- public void translate(ResultSet rs,Properties p)
- throws XMLFactoryException, IOException, SQLException
- {
- // if we don't pass any properties, create an empty one and cache it if
- // further calls do the same
- if(p==null) {
- p=getDefaultProperties();
- }
-
- // Fetch some common values
- String setName = p.getProperty(RESULTSET_NAME,DEFAULT_RESULTSET_NAME);
- String rowName = p.getProperty(ROW_NAME,DEFAULT_ROW_NAME);
-
- ResultSetMetaData rsmd = rs.getMetaData();
- int numcols = rsmd.getColumnCount();
-
- String colname[] = new String[numcols]; // field name cache
- int coltype[] = new int[numcols]; // true to use attribute false content
- String colattr[] = new String[numcols]; // Attribute name
-
- // These deal with when an attribute is to go into the row's tag parameters
- int parentFields[] = getRowAttributes(numcols,colname,colattr,coltype,rsmd,p); // used to cache the id's
- int numParents= parentFields==null ? 0 : parentFields.length; // number of parent fields
- boolean haveParent= numParents>0; // true only if we need to use these
-
- // This allows some limiting of the output result
- int firstRow = Integer.parseInt(p.getProperty(FIRST_ROW,"0"));
- int lastRow = Integer.parseInt(p.getProperty(LAST_ROW,"0"));
- int curRow=0;
-
- // Start the result set's tag
- factory.startTag(setName);
-
- while(rs.next()) {
- if(firstRow<=curRow && (lastRow==0 || curRow<=lastRow)) {
- // write this row's tag, attributes and content from the column caches
- }
- curRow++;
- }
-
- factory.endTag();
- }
-
- /**
- * This generates a DTD describing the XML written by translate().
- *
- * ToDo:
- * Add ability to have NULLABLE columns appear as optional (ie instead of
- * x, have x? (DTD for Optional). Can't use + or * as that indicates more than
- * 1 instance).
- *
- * @param rs ResultSet
- * @param p Properties defining tag types (as translate)
- * @param out Writer to send output to
- */
- public void buildDTD(ResultSet rs,Properties p,Writer out)
- throws IOException, SQLException
- {
- // if we don't pass any properties, create an empty one and cache it if
- // further calls do the same
- if(p==null) {
- p=getDefaultProperties();
- }
-
- // Fetch some common values
- String setName = p.getProperty(RESULTSET_NAME,DEFAULT_RESULTSET_NAME);
- String rowName = p.getProperty(ROW_NAME,DEFAULT_ROW_NAME);
-
- ResultSetMetaData rsmd = rs.getMetaData();
- int numcols = rsmd.getColumnCount();
-
- String colname[] = new String[numcols]; // field name cache
- int coltype[] = new int[numcols]; // true to use attribute false content
- String colattr[] = new String[numcols]; // Attribute name
-
- // These deal with when an attribute is to go into the row's tag parameters
- int parentFields[] = getRowAttributes(numcols,colname,colattr,coltype,rsmd,p); // used to cache the id's
- int numParents= parentFields==null ? 0 : parentFields.length; // number of parent fields
- boolean haveParent= numParents>0; // true only if we need to use these
-
- // Now the dtd defining the ResultSet
- out.write("\n");
-
- // Now the dtd defining each row
- out.write("\n");
-
- // Now handle any ROW_ATTRIBUTE's
- if(haveParent) {
- out.write("\n");
- }
-
- // Now add any CONTENT & ATTRIBUTE fields
- for(int i=0;i<numcols;i++) {
-
- // CONTENT
- out.write("\n");
-
- // ATTRIBUTE
- if(coltype[i]==1) {
- out.write("\n");
- }
- }
- }
- }
-
- /**
- * Private method used by the core translate and buildDTD methods.
- * @param numcols Number of columns in ResultSet
- * @param colname Array of column names
- * @param colattr Array of column attribute names
- * @param coltype Array of column types
- * @param rsmd ResultSetMetaData for ResultSet
- * @param p Properties being used
- * @return array containing field numbers which should appear as attributes
- * within the rows tag.
- * @throws SQLException from JDBC
- */
- private int[] getRowAttributes(int numcols,
- String colname[],String colattr[],
- int coltype[],
- ResultSetMetaData rsmd,Properties p)
- throws SQLException
- {
- int pf[] = null;
- int nf = 0;
-
- // Now we put a columns value as an attribute if the property
- // fieldname=attribute (ie myname=attribute)
- // and if the fieldname.name property exists, use it as the attribute name
- for(int i=0;i<numcols;i++) {
- // examine this column's properties, filling colname[], coltype[] and
- // colattr[], and recording in pf[] the columns that become row attributes
- }
-
- if(nf>0) {
- int r[] = new int[nf];
- System.arraycopy(pf,0,r,0,nf);
- return r;
- }
-
- // Return null if no tags are to appear as attributes to the row's tag
- return null;
- }
-
-}
\ No newline at end of file
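To show how the property constants above steer translate(), here is a hedged sketch; the connection details and the mytable query are illustrative, and the property values simply override the RESULTSET/RECORD defaults defined above.

    import java.io.OutputStreamWriter;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import java.util.Properties;
    import uk.org.retep.xml.jdbc.XMLResultSet;

    public class ResultSetExportExample
    {
        public static void main(String[] args) throws Exception
        {
            Class.forName("org.postgresql.Driver");
            Connection con = DriverManager.getConnection(
                "jdbc:postgresql:test", "user", "password");
            Statement st = con.createStatement();
            ResultSet rs = st.executeQuery("SELECT * FROM mytable");

            // start from the shared defaults, then override tag names and row range
            Properties p = (Properties) XMLResultSet.getDefaultProperties().clone();
            p.setProperty(XMLResultSet.RESULTSET_NAME, "TABLE");
            p.setProperty(XMLResultSet.ROW_NAME, "ROW");
            p.setProperty(XMLResultSet.FIRST_ROW, "0");
            p.setProperty(XMLResultSet.LAST_ROW, "100");

            XMLResultSet xrs = new XMLResultSet();
            xrs.translate(rs, p, new OutputStreamWriter(System.out));
            xrs.close();

            con.close();
        }
    }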
diff --git a/contrib/retep/uk/org/retep/xml/parser/TagHandler.java b/contrib/retep/uk/org/retep/xml/parser/TagHandler.java
deleted file mode 100644
index 78deb0ec0aa..00000000000
--- a/contrib/retep/uk/org/retep/xml/parser/TagHandler.java
+++ /dev/null
@@ -1,201 +0,0 @@
-package uk.org.retep.xml.parser;
-
-import java.io.CharArrayWriter;
-import java.io.IOException;
-import java.util.List;
-import java.util.Iterator;
-import java.util.HashSet;
-import java.util.ArrayList;
-import java.util.HashMap;
-import org.xml.sax.AttributeList;
-import org.xml.sax.HandlerBase;
-import org.xml.sax.InputSource;
-import org.xml.sax.Parser;
-import org.xml.sax.SAXException;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.parsers.SAXParser;
-import javax.xml.parsers.SAXParserFactory;
-
-/**
- * This class implements the base of the XML handler. You create an instance,
- * register classes (who implement TagListener) that are interested in the tags
- * and pass it to SAX.
- *
- *
- * Or you create an instance, register the TagListeners and use the getParser()
- * method to create a Parser. Then start parsing by calling its parse() method.
- */
-
-public class TagHandler extends HandlerBase {
-
- /**
- * The current active level
- */
- private int level;
-
- /**
- * cache used to handle nesting of tags
- */
- private List contents;
-
- /**
- * cache used to handle nesting of tags
- */
- private List tags;
-
- /**
- * cache used to handle nesting of tags
- */
- private List args;
-
- // Current active content writer
- private CharArrayWriter content;
-
- // List of TagListener's who want to be fed data
- private HashSet tagListeners;
-
- /**
- * default constructor
- */
- public TagHandler() {
- level=0;
- contents = new ArrayList();
- tags = new ArrayList();
- args = new ArrayList();
- tagListeners = new HashSet();
- }
-
- /**
- * Called by SAX when a tag is begun. This simply creates a new level in the
- * cache and stores the parameters and tag name in there.
- */
- public void startElement(String p0, AttributeList p1) throws SAXException {
-
- // Now move up and fetch a CharArrayWriter from the cache
- // creating if this is the first time at this level
- if(contents.size()<=level) {
- contents.add(new CharArrayWriter());
- tags.add(p0);
- args.add(new HashMap());
- }
-
- content=(CharArrayWriter) contents.get(level);
- content.reset();
-
- // Also cache the tag's text and argument list
- tags.set(level,p0);
-
- HashMap h = (HashMap) args.get(level);
- h.clear();
- for(int i=p1.getLength()-1;i>-1;i--) {
- h.put(p1.getName(i),p1.getValue(i));
- }
-
- // Now notify any TagListeners
- Iterator it = tagListeners.iterator();
- while(it.hasNext())
- ( (TagListener) it.next() ).tagStart(level,p0,h);
-
- // Now move up a level
- level++;
- }
-
- /**
- * This is called by SAX at the end of a tag. It notifies the TagListeners of
- * the tag's content and then drops back a level, so the parent tag may continue.
- */
- public void endElement(String p0) throws SAXException {
- // move up a level retrieving that level's current content
- // Now this exception should never occur as the underlying parser should
- // actually trap it.
- if(level<1)
- throw new SAXException("Already at top level?");
- level--;
-
- // Now notify any TagListeners
- Iterator it = tagListeners.iterator();
- while(it.hasNext())
- ( (TagListener) it.next() ).tagContent(content);
-
- // allows large content to be released early
- content.reset();
-
- // Now reset content to the previous level
- content=(CharArrayWriter) contents.get(level);
- }
-
- /**
- * Called by SAX so that content between the start and end tags are captured.
- */
- public void characters(char[] p0, int p1, int p2) throws SAXException {
- content.write(p0,p1,p2);
- }
-
- /**
- * Adds a TagListener so that it is notified of tags as they are processed.
- * @param h TagListener to add
- */
- public void addTagListener(TagListener h) {
- tagListeners.add(h);
- }
-
- /**
- * Removes the TagListener so it no longer receives notifications of tags
- */
- public void removeTagListener(TagListener h) {
- tagListeners.remove(h);
- }
-
- /**
- * This method returns a org.xml.sax.Parser object that will parse the
- * contents of a URI.
- *
- *
- * Normally you would call this method, then call the parse(uri) method of
- * the returned object.
- * @return org.xml.sax.Parser object
- */
- public Parser getParser()
- throws SAXException
- {
- try {
- SAXParserFactory spf = SAXParserFactory.newInstance();
-
- String validation = System.getProperty ("javax.xml.parsers.validation", "false");
- if (validation.equalsIgnoreCase("true"))
- spf.setValidating (true);
-
- SAXParser sp = spf.newSAXParser();
- Parser parser = sp.getParser ();
-
- parser.setDocumentHandler(this);
-
- return(parser);
- } catch(ParserConfigurationException pce) {
- throw new SAXException(pce.toString());
- }
- }
-
- /**
- * This method will parse the specified URI.
- *
- *
- * Internally this is the same as getParser().parse(uri);
- * @param uri The URI to parse
- */
- public void parse(String uri)
- throws IOException, SAXException
- {
- getParser().parse(uri);
- }
-
- /**
- * This method will parse the specified InputSource.
- *
- *
- * Internally this is the same as getParser().parse(is);
- * @param is The InputSource to parse
- */
- public void parse(InputSource is)
- throws IOException, SAXException
- {
- getParser().parse(is);
- }
-
-}
\ No newline at end of file
diff --git a/contrib/retep/uk/org/retep/xml/parser/TagListener.java b/contrib/retep/uk/org/retep/xml/parser/TagListener.java
deleted file mode 100644
index 24146b3e245..00000000000
--- a/contrib/retep/uk/org/retep/xml/parser/TagListener.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package uk.org.retep.xml.parser;
-
-import java.util.HashMap;
-import java.io.CharArrayWriter;
-
-/**
- * This interface defines the methods a class needs to implement if it wants the
- * xml parser to notify it of any xml tags.
- */
-
-public interface TagListener {
- /**
- * This is called when a tag has just been started.
- *
- * NB: args is volatile, so if you use it beyond the lifetime of
- * this call, then you must make a copy of the HashMap (and not simply
- * store this HashMap).
- * @param level The number of tags above this
- * @param tag The tag name
- * @param args A HashMap of any arguments
- */
- public void tagStart(int level,String tag,HashMap args);
- /**
- * This method is called by ContHandler to process a tag once it has been
- * fully processed.
- *
- * NB: content is volatile, so you must copy its contents if you use
- * it beyond the lifetime of this call.
- * @param content CharArrayWriter containing the content of the tag.
- */
- public void tagContent(CharArrayWriter content);
-}
\ No newline at end of file
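The parser side is symmetrical: a hedged sketch of a TagListener implementation fed by TagHandler, where the listener class name is made up and the URI comes from the command line.

    import java.io.CharArrayWriter;
    import java.util.HashMap;
    import uk.org.retep.xml.parser.TagHandler;
    import uk.org.retep.xml.parser.TagListener;

    public class PrintListener implements TagListener
    {
        public void tagStart(int level, String tag, HashMap args)
        {
            // args is reused by the handler, so copy it before keeping it
            System.out.println(level + ": " + tag + " " + new HashMap(args));
        }

        public void tagContent(CharArrayWriter content)
        {
            // content is also reused, so take its String value immediately
            System.out.println("content: " + content.toString());
        }

        public static void main(String[] args) throws Exception
        {
            TagHandler handler = new TagHandler();
            handler.addTagListener(new PrintListener());
            handler.parse(args[0]);   // URI of the document to parse
        }
    }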
diff --git a/contrib/retep/uk/org/retep/xml/test/XMLExport.java b/contrib/retep/uk/org/retep/xml/test/XMLExport.java
deleted file mode 100644
index 116f2509060..00000000000
--- a/contrib/retep/uk/org/retep/xml/test/XMLExport.java
+++ /dev/null
@@ -1,191 +0,0 @@
-package uk.org.retep.xml.test;
-
-import java.lang.Exception;
-import java.io.*;
-import java.sql.*;
-import java.util.Properties;
-import uk.org.retep.xml.core.XMLFactoryException;
-import uk.org.retep.xml.jdbc.XMLDatabase;
-import uk.org.retep.xml.jdbc.XMLResultSet;
-
-/**
- * This "test" class is a fully functional tool in its own right. It utilises
- * the xml classes to query and export to XML, or to dump database structures
- * into XML.
- */
-
-public class XMLExport
-{
- /**
- * The current Database Connection
- */
- protected Connection conn;
- protected Statement stat;
- protected String drvr,url,table;
-
- protected XMLResultSet xrs;
- protected XMLDatabase xdb;
- protected Properties prop;
- protected boolean outXML;
- protected boolean outDTD;
- protected boolean outTAB;
- protected int maxRows=0;
-
- public XMLExport(String[] args)
- throws IOException,SQLException,XMLFactoryException,ClassNotFoundException
- {
- xrs = new XMLResultSet();
- xrs.setWriter(new OutputStreamWriter(System.out));
- //Properties p = new Properties(xrs.getDefaultProperties());
- prop = (Properties) xrs.getDefaultProperties().clone();
-
- xdb = new XMLDatabase(xrs.getXMLFactory());
-
- for(int i=0;i<args.length;i++) {
- String arg=args[i];
-
- // Schema export option: an optional table name may follow the switch
- if(arg.length()>2) {
- String table=arg.substring(2);
- System.out.println("Generating XML Schema of table "+table);
- xdb.writeTable(conn,table);
- xdb.close();
- } else {
- System.out.println("Generating XML Schema of database");
- xdb.writeDatabase(conn);
- xdb.close();
- }
- } else if(arg.equals("-V")) {
- // Select table output
- outXML=outDTD=false;
- } else if(arg.equals("-X")) {
- // Select XML output
- outXML=true;
- outDTD=outTAB=false;
- } else if(arg.equals("-Y")) {
- // Select DTD output
- outXML=outTAB=false;
- outDTD=true;
- } else if(arg.startsWith("-")) {
- System.err.println("Unknown argument: "+arg);
- System.exit(1);
- } else {
- // Ok, anything not starting with "-" are queries
- if(stat==null)
- stat=conn.createStatement();
-
- System.out.println("Executing "+arg);
- ResultSet rs = stat.executeQuery(arg);
- if(rs!=null) {
- if(outXML) {
- xrs.translate(rs,prop);
- xrs.close();
- } else if(outDTD) {
- // Output the DTD
- xrs.buildDTD(rs,prop);
- xrs.close();
- } else {
- // Normal resultset output
- int rc=0;
-
- ResultSetMetaData rsmd = rs.getMetaData();
- int nc = rsmd.getColumnCount();
- boolean us=false;
- for(int c=0;c<nc;c++) {
-
-The result as a whole is wrapped in toptag, and each result node is wrapped in
-septag. toptag and septag may be empty strings, in which
-case the respective tag will be omitted.
-
-Example:
-
-Given a table docstore:
-
- Attribute | Type | Modifier
------------+---------+----------
- docid | integer |
- document | text |
-
-containing documents such as (these are archaeological site
-descriptions, in case anyone is wondering):
-
-
-
-<site>
-  <name>Church Farm, Ashton Keynes</name>
-  watching brief
-  <location>SU04209424</location>
-</site>
-
-
-one can type:
-
-select docid,
-pgxml_xpath(document,'//site/name/text()','','') as sitename,
-pgxml_xpath(document,'//site/location/text()','','') as location
- from docstore;
-
-and get as output:
-
- docid | sitename | location
--------+--------------------------------------+------------
- 1 | Church Farm, Ashton Keynes | SU04209424
- 2 | Glebe Farm, Long Itchington | SP41506500
- 3 | The Bungalow, Thames Lane, Cricklade | SU10229362
-(3 rows)
-
-or, to illustrate the use of the extra tags:
-
-select docid as id,
-pgxml_xpath(document,'//find/type/text()','set','findtype')
-from docstore;
-
- id | pgxml_xpath
-----+-------------------------------------------------------------------------
-  1 | <set></set>
-  2 | <set><findtype>Urn</findtype></set>
-  3 | <set><findtype>Pottery</findtype><findtype>Animal bone</findtype></set>
-(3 rows)
-
-Which produces a new, well-formed document. Note that document 1 had
-no matching instances, so the set returned contains no
-elements. document 2 has 1 matching element and document 3 has 2.
-
-This is just scratching the surface because XPath allows all sorts of
-operations.
-
-Note: I've only implemented the return of nodeset and string values so
-far. This covers (I think) many types of queries, however.
-
-John Gray 16 August 2001
-
-
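For callers coming from JDBC, a hedged sketch of the same query may help; the SQL text mirrors the psql example above, while the class name and connection details are placeholders.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class XPathQueryExample
    {
        public static void main(String[] args) throws Exception
        {
            Class.forName("org.postgresql.Driver");
            Connection con = DriverManager.getConnection(
                "jdbc:postgresql:test", "user", "password");
            Statement st = con.createStatement();

            // same query as above: extract each document's site name
            ResultSet rs = st.executeQuery(
                "select docid, pgxml_xpath(document,'//site/name/text()','','')"
                + " as sitename from docstore");

            while (rs.next())
                System.out.println(rs.getInt("docid") + " | " + rs.getString("sitename"));

            rs.close();
            st.close();
            con.close();
        }
    }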
diff --git a/contrib/xml/pgxml.source b/contrib/xml/pgxml.source
deleted file mode 100644
index 8a04fa2c9b2..00000000000
--- a/contrib/xml/pgxml.source
+++ /dev/null
@@ -1,7 +0,0 @@
---SQL for XML parser
-
-CREATE FUNCTION pgxml_parse(text) RETURNS bool
- AS '_OBJWD_/pgxml_DLSUFFIX_' LANGUAGE 'c' WITH (isStrict);
-
-CREATE FUNCTION pgxml_xpath(text,text,text,text) RETURNS text
- AS '_OBJWD_/pgxml_DLSUFFIX_' LANGUAGE 'c' WITH (isStrict);
\ No newline at end of file
diff --git a/contrib/xml/pgxml_dom.source b/contrib/xml/pgxml_dom.source
deleted file mode 100644
index a8582398b1e..00000000000
--- a/contrib/xml/pgxml_dom.source
+++ /dev/null
@@ -1,7 +0,0 @@
---SQL for XML parser
-
-CREATE FUNCTION pgxml_parse(text) RETURNS bool
- AS '_OBJWD_/pgxml_dom_DLSUFFIX_' LANGUAGE 'c' WITH (isStrict);
-
-CREATE FUNCTION pgxml_xpath(text,text,text,text) RETURNS text
- AS '_OBJWD_/pgxml_dom_DLSUFFIX_' LANGUAGE 'c' WITH (isStrict);
\ No newline at end of file
diff --git a/doc/src/sgml/libpgeasy.sgml b/doc/src/sgml/libpgeasy.sgml
deleted file mode 100644
index 4bb1b7949bd..00000000000
--- a/doc/src/sgml/libpgeasy.sgml
+++ /dev/null
@@ -1,152 +0,0 @@
-
-
-
- libpgeasy - Simplified C Library
-
-
- Author
-
-
- Written by Bruce Momjian
- (pgman@candle.pha.pa.us)
- and last updated 2002-03-04
-
-
-
-
- pgeasy allows you to cleanly interface
- to the libpq library, more like a 4GL
- SQL interface. Refer to for more
- information about libpq.
-
-
-
- It consists of a set of simplified C functions that encapsulate the
- functionality of libpq. The functions are:
-
-
-
-
-PGresult *doquery(char *query);
-
-
-
-
-
-PGconn *connectdb(char *options);
-
-
-
-
-
-void disconnectdb();
-
-
-
-
-
-int fetch(void *param,...);
-
-
-
-
-
-int fetchwithnulls(void *param,...);
-
-
-
-
-
-void reset_fetch();
-
-
-
-
-
-void on_error_continue();
-
-
-
-
-
-void on_error_stop();
-
-
-
-
-
-PGresult *get_result();
-
-
-
-
-
-void set_result(PGresult *newres);
-
-
-
-
-
-
-
- Many functions return a structure or value, so you can work
- with the result if required.
-
-
-
- You basically connect to the database with
- connectdb, issue your query with
- doquery, fetch the results with
- fetch, and finish with
- disconnectdb.
-
-
-
- For SELECT queries, fetch
- allows you to pass pointers as parameters, and on return the
- variables are filled with data from the binary cursor you opened.
- These binary cursors cannot be used if you are running the
- pgeasy client on a system with a different
- architecture than the database server. If you pass a NULL pointer
- parameter, the column is skipped. fetchwithnulls
- allows you to retrieve the NULL status of the field by passing an
- int* after each result pointer, which returns true
- or false to indicate if the field is null. You can always use
- libpq functions on the
- PGresult pointer returned by
- doquery. reset_fetch starts
- the fetch back at the beginning.
-
-
-
- get_result and set_result
- allow you to handle multiple open result sets. Use
- get_result to save a result into an application
- variable. You can then later use set_result to
- return to the previously saved result.
-
-
-
- There are several demonstration programs in
- pgsql/src/interfaces/libpgeasy/examples>.
-
-
-
-
diff --git a/doc/src/sgml/odbc.sgml b/doc/src/sgml/odbc.sgml
deleted file mode 100644
index cbc40624767..00000000000
--- a/doc/src/sgml/odbc.sgml
+++ /dev/null
@@ -1,805 +0,0 @@
-
-
-
-
-
-
- Tim
- Goeke
-
-
- Thomas
- Lockhart
-
-
- 1998-10-21
-
-
- ODBC Interface
-
-
- ODBC
-
-
-
- Introduction
-
-
-
- Background information originally by Tim Goeke
- (tgoeke@xpressway.com)
-
-
-
-
- ODBC (Open Database Connectivity) is an abstract
- API
- that allows you to write applications that can interoperate
- with various RDBMS servers.
- ODBC provides a product-neutral interface
- between frontend applications and database servers,
- allowing a user or developer to write applications that are
- portable between servers from different manufacturers.
-
-
-
- The ODBC API matches up
- on the backend to an ODBC-compatible data source.
- This could be anything from a text file to an Oracle or
- PostgreSQL RDBMS.
-
-
-
- The backend access comes from ODBC drivers,
- or vendor-specific drivers that
- allow data access. psqlODBC, which is included in the PostgreSQL> distribution, is such a driver,
- along with others that are
- available, such as the OpenLink ODBC drivers.
-
-
-
- Once you write an ODBC application,
- you should be able to connect to any
- back-end database, regardless of the vendor, as long as the database schema
- is the same.
-
-
-
- For example, you could have MS SQL Server
- and PostgreSQL servers that have
- exactly the same data. Using ODBC,
- your Windows application would make exactly the
- same calls and the back-end data source would look the same (to the Windows
- application).
-
-
-
-
- Installation
-
-
- In order to make use of an ODBC> driver there must
- exist a driver manager> on the system where the
- ODBC> driver is to be used. There are two free
- ODBC> driver managers for Unix-like operating systems
- known to us: iODBC
- iODBC>
- and unixODBCunixODBC>.
- Instructions for installing these driver managers are to be found
- in the respective distribution. Software that provides database
- access through ODBC should provide its own
- driver manager (which may well be one of these two). Having said
- that, any driver manager that you can find for your platform
- should support the PostgreSQL> ODBC>
- driver, or any other ODBC> driver for that matter.
-
-
-
-
- The unixODBC> distribution ships with a
- PostgreSQL> ODBC> driver of its own,
- which is similar to the one contained in the
- PostgreSQL> distribution. It is up to you which
- one you want to use. We plan to coordinate the development of
- both drivers better in the future.
-
-
-
-
- To install the ODBC> you simply need to supply the
-
-
-
- It is also possible to build the driver to be specifically tuned
- for use with iODBC> or unixODBC>.
- This means in particular that the driver will use the driver
- manager's routines to process the configuration files, which is
- probably desirable since it creates a more consistent
- ODBC> environment on your system. If you want to do
- that, then supply the configure> options
-
-
-
- If you build a stand-alone driver (not tied to
- iODBC> or unixODBC>), then you can
- specify where the driver should look for the configuration file
- odbcinst.ini>. By default it will be the directory
- /usr/local/pgsql/etc/>, or equivalent, depending on
- what
-
-
- odbc.sql>>
- Additionally, you should install the ODBC catalog extensions. That will
- provide a number of functions mandated by the ODBC standard that are not
- supplied by PostgreSQL> by default. The file
- /usr/local/pgsql/share/odbc.sql> (in the default installation layout)
- contains the appropriate definitions, which you can install as follows:
-
-psql -d template1 -f LOCATION>/odbc.sql
-
- where specifying template1 as the target
- database will ensure that all subsequent new databases will have
- these same definitions. If for any reason you want to remove
- these functions again, run the file
- odbc-drop.sql through
- psql.
-
-
-
-
- Configuration Files
-
- .odbc.ini>>
-
-
- ~/.odbc.ini contains user-specified access information
- for the psqlODBC driver.
- The file uses conventions typical for Windows
- Registry files.
-
-
-
- The .odbc.ini file has three required sections.
- The first is [ODBC Data Sources]
- which is a list of arbitrary names and descriptions for each database
- you wish to access. The second required section is the
- Data Source Specification and there will be one of these sections
- for each database.
- Each section must be labeled with the name given in
- [ODBC Data Sources] and must contain the following entries:
-
-
-Driver = prefix/lib/libpsqlodbc.so
-Database = DatabaseName
-Servername = localhost
-Port = 5432
-
-
-
-
- Remember that the PostgreSQL database name is
- usually a single word, without path names of any sort.
- The PostgreSQL server manages the actual access
- to the database, and you need only specify the name from the client.
-
-
-
- Other entries may be inserted to control the format of the display.
- The third required section is [ODBC]
- which must contain the InstallDir keyword
- and which may contain other options.
-
-
-
- Here is an example .odbc.ini file,
- showing access information for three databases:
-
-
-[ODBC Data Sources]
-DataEntry = Read/Write Database
-QueryOnly = Read-only Database
-Test = Debugging Database
-Default = Postgres Stripped
-
-[DataEntry]
-ReadOnly = 0
-Servername = localhost
-Database = Sales
-
-[QueryOnly]
-ReadOnly = 1
-Servername = localhost
-Database = Sales
-
-[Test]
-Debug = 1
-CommLog = 1
-ReadOnly = 0
-Servername = localhost
-Username = tgl
-Password = "no$way"
-Port = 5432
-Database = test
-
-[Default]
-Servername = localhost
-Database = tgl
-Driver = /opt/postgres/current/lib/libpsqlodbc.so
-
-[ODBC]
-InstallDir = /opt/applix/axdata/axshlib
-
-
-
-
-
- Windows Applications
-
-
- In the real world, differences in drivers and the level of
- ODBC support
- lessens the potential of ODBC:
-
-
-
-
- Access, Delphi, and Visual Basic all support ODBC directly.
-
-
-
-
- Under C++, such as Visual C++,
- you can use the C++ ODBC API.
-
-
-
-
-
- In Visual C++, you can use the CRecordSet class, which wraps the
- ODBC API
- set within an MFC 4.2 class. This is the easiest route if you are doing
- Windows C++ development under Windows NT.
-
-
-
-
-
-
- Writing Applications
-
-
-
- If I write an application for PostgreSQL
- can I write it using ODBC calls
- to the PostgreSQL server,
- or is that only when another database program
- like MS SQL Server or Access needs to access the data?
-
-
-
- The ODBC API
- is the way to go.
- For Visual C++ coding you can find out more at
- Microsoft's web site or in your Visual C++
- documentation.
-
-
-
- Visual Basic and the other RAD tools have Recordset objects
- that use ODBC
- directly to access data. Using the data-aware controls, you can quickly
- link to the ODBC back-end database
- (very quickly).
-
-
-
- Playing around with MS Access> will help you sort this out. Try using
- File>Get External Data>.
-
-
-
-
- You'll have to set up a DSN first.
-
-
-
-
-
-
-
- ApplixWare
-
-
- Applixware
-
-
-
- Applixware has an
- ODBC database interface
- supported on at least some platforms.
- Applixware 4.4.2 has been
- demonstrated under Linux with PostgreSQL 7.0
- using the psqlODBC
- driver contained in the PostgreSQL distribution.
-
-
-
- Configuration
-
-
- Applixware must be configured correctly
- in order for it to
- be able to access the PostgreSQL
- ODBC software drivers.
-
-
-
- Enabling Applixware Database Access
-
-
- These instructions are for the 4.4.2 release of
- Applixware on Linux.
- Refer to the Linux Sys Admin on-line book
- for more detailed information.
-
-
-
-
- You must modify axnet.cnf so that
- elfodbc can
- find libodbc.so
- (the ODBC driver manager) shared library.
- This library is included with the Applixware distribution,
- but axnet.cnf needs to be modified to point to the
- correct location.
-
-
-
- As root, edit the file
- applixroot/applix/axdata/axnet.cnf.
-
-
-
-
-
-
- At the bottom of axnet.cnf,
- find the line that starts with
-
-
-#libFor elfodbc /ax/...
-
-
-
-
-
- Change the line to read
-
-
-libFor elfodbc applixroot/applix/axdata/axshlib/lib
-
-
- which will tell elfodbc to look in this directory
- for the ODBC support library.
- Typically Applix is installed in
- /opt so the full path would be
- /opt/applix/axdata/axshlib/lib,
- but if you have installed Applix
- somewhere else then change the path accordingly.
-
-
-
-
-
-
-
- Create .odbc.ini as
- described in . You may also want to add the flag
-
-
-TextAsLongVarchar=0
-
-
- to the database-specific portion of .odbc.ini
- so that text fields will not be shown as **BLOB**.
-
-
-
-
-
- Testing Applixware ODBC Connections
-
-
-
- Bring up Applix Data
-
-
-
-
-
- Select the PostgreSQL database of interest.
-
-
-
-
-
-
- Select QueryChoose Server.
-
-
-
-
- Select ODBC, and click Browse.
- The database you configured in .odbc.ini
- should be shown. Make sure that the Host: field
- is empty (if it is not, axnet> will try to contact axnet> on another machine
- to look for the database).
-
-
-
-
- Select the database in the box that was launched by Browse,
- then click OK.
-
-
-
-
- Enter user name and password in the login identification dialog,
- and click OK.
-
-
-
-
-
- You should see Starting elfodbc server
- in the lower left corner of the
- data window. If you get an error dialog box, see the debugging section
- below.
-
-
-
-
- The Ready message will appear in the lower left corner of the data
- window. This indicates that you can now enter queries.
-
-
-
-
- Select a table from
- Query>Choose
- tables>, and then select
- Query>Query>
- to access the database. The first 50 or so rows from the table
- should appear.
-
-
-
-
-
-
- Common Problems
-
-
- The following messages can appear while trying to make an
- ODBC connection through
- Applix Data:
-
-
-
-
- Cannot launch gateway on server
-
-
-
- elfodbc can't find libodbc.so.
- Check your axnet.cnf.
-
-
-
-
-
-
- Error from ODBC Gateway:
- IM003::[iODBC][Driver Manager]Specified driver could not be loaded
-
-
-
- libodbc.so cannot find the driver listed in
- .odbc.ini. Verify the settings.
-
-
-
-
-
-
- Server: Broken Pipe
-
-
-
-
- The driver process has terminated due to some other
- problem. You might not have an up-to-date version
- of the PostgreSQL
- ODBC package.
-
-
-
-
-
-
- setuid to 256: failed to launch gateway
-
-
-
-
- The September release of Applixware 4.4.1 (the first release with official
- ODBC support under Linux) shows problems when user names
- exceed eight (8) characters in length.
- Problem description contributed by Steve Campbell
- (scampbell@lear.com).
-
-
-
-
-
-
-
-
-
- Author
-
-
- Contributed by Steve Campbell (scampbell@lear.com),
- 1998-10-20
-
-
-
- The axnet program's security system
- seems a little suspect. axnet does things
- on behalf of the user and on a true
- multiuser system it really should be run with root security
- (so it can read/write in each user's directory).
- I would hesitate to recommend this, however, since we have no idea what
- security holes this creates.
-
-
-
-
- Debugging Applixware ODBC Connections
-
-
- One good tool for debugging connection problems uses the Unix system
- utility strace.
-
-
- Debugging with strace
-
-
-
- Start Applixware.
-
-
-
-
- Start an strace on
- the axnet process. For example, if
-
-
-$ps -aucx | grep ax
-
-
- shows
-
-
-cary 10432 0.0 2.6 1740 392 ? S Oct 9 0:00 axnet
-cary 27883 0.9 31.0 12692 4596 ? S 10:24 0:04 axmain
-
-
-
-
- Then run
-
-
-$strace -f -s 1024 -p 10432
-
-
-
-
-
-
- Check the strace output.
-
-
- Note from Cary
-
-
- Many of the error messages from Applixware
- go to stderr,
- but I'm not sure where stderr
- is sent, so strace is the way to find out.
-
-
-
-
-
-
- For example, after getting
- a Cannot launch gateway on server,
- I ran strace on axnet and got
-
-
-[pid 27947] open("/usr/lib/libodbc.so", O_RDONLY) = -1 ENOENT (No such file or directory)
-[pid 27947] open("/lib/libodbc.so", O_RDONLY) = -1 ENOENT (No such file or directory)
-[pid 27947] write(2, "/usr2/applix/axdata/elfodbc: can't load library 'libodbc.so'\n", 61) = -1 EIO (I/O error)
-
- So what is happening is that applix elfodbc is searching for libodbc.so, but it
- cannot find it. That is why axnet.cnf needed to be changed.
-
-
-
-
- Running the Applixware Demo
-
- I think the condition this refers to is gone. -- petere 2002-01-07
-
-
- In order to go through the
- Applixware Data Tutorial, you need to create
- the sample tables that the Tutorial refers to. The ELF Macro used to
- create the tables tries to use a NULL condition
- on many of the database columns,
- and PostgreSQL does not currently allow this option.
-
-
-
- To get around this problem, you can do the following:
-
-
-
- Modifying the Applixware Demo
-
-
-
- Copy /opt/applix/axdata/eng/Demos/sqldemo.am
- to a local directory.
-
-
-
-
-
- Edit this local copy of sqldemo.am:
-
-
-
-
-
-
- Search for null_clause = "NULL".
-
-
-
-
-
- Change this to null_clause = "".
-
-
-
-
-
-
-
- Start Applix Macro Editor.
-
-
-
-
-
- Open the sqldemo.am file from the Macro Editor.
-
-
-
-
-
- Select File>Compile and Save>.
-
-
-
-
-
- Exit Macro Editor.
-
-
-
-
-
- Start Applix Data.
-
-
-
-
-
- Select *>Run Macro.
-
-
-
-
-
- Enter the value sqldemo, then click OK.
-
-
-
- You should see the progress in the status line of the data window
- (in the lower left corner).
-
-
-
-
-
- You should now be able to access the demo tables.
-
-
-
-
-
-
- Useful Macros
-
-
- You can add information about your
- database login and password to the standard Applix start-up
- macro file. This is an example
- ~/axhome/macros/login.am file:
-
-
-macro login
-set_set_system_var@("sql_username@","tgl")
-set_system_var@("sql_passwd@","no$way")
-endmacro
-
-
-
-
- You should be careful about the file protections on any file containing
- user name and password information.
-
-
-
-
-
-
-
-
-
diff --git a/doc/src/sgml/recovery.sgml b/doc/src/sgml/recovery.sgml
deleted file mode 100644
index aa10b8cf640..00000000000
--- a/doc/src/sgml/recovery.sgml
+++ /dev/null
@@ -1,104 +0,0 @@
-
- Database Failures
-
-
- Database failures (or the possibility of such) must be assumed to be
- lurking, ready to strike at some time in the future. A prudent
- database administrator will plan for the inevitability of failures
- of all possible kinds, and will have appropriate plans and
- procedures in place before the failure occurs.
-
-
-
- Database recovery is necessary in the event of hardware or software
- failure. There are several categories of failures; some of these
- require relatively minor adjustments to the database, while others
- may depend on the existence of previously prepared database dumps
- and other recovery data sets. It should be emphasized that if your
- data is important and/or difficult to regenerate, then you should
- have considered and prepared for various failure scenarios.
-
-
-
- Disk Filled
-
-
- A filled data disk may result in subsequent corruption of database
- indexes, but not of the fundamental data tables. If the WAL files
- are on the same disk (as is the case for a default configuration)
- then a filled disk during database initialization may result in
- corrupted or incomplete WAL files. This failure condition is
- detected and the database will refuse to start up. You must free
- up additional space on the disk (or move the WAL area to another
- disk; see ) and then restart the
- postmaster to recover from this condition.
-
-
-
-
- Disk Failed
-
-
- Failure of any disk (or of a logical storage device such as a RAID
- subsystem) involved with an active database will require
- that the database be recovered from a previously prepared database
- dump. This dump must be prepared using
- pg_dumpall, and updates to the database
- occurring after the database installation was dumped will be lost.
-
-
-
-
-
-
-
-
diff --git a/doc/src/sgml/version.sgml b/doc/src/sgml/version.sgml
deleted file mode 100644
index 33ff27cd658..00000000000
--- a/doc/src/sgml/version.sgml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
diff --git a/doc/src/sgml/y2k.sgml b/doc/src/sgml/y2k.sgml
deleted file mode 100644
index 60ad62e51ed..00000000000
--- a/doc/src/sgml/y2k.sgml
+++ /dev/null
@@ -1,95 +0,0 @@
-
-
-
- Y2K Statement
-
-
- Author
-
-
- Written by Thomas Lockhart
- (lockhart@fourpalms.org)
- on 1998-10-22. Updated 2000-03-31.
-
-
-
-
- The PostgreSQL Global Development Group provides
- the PostgreSQL software code tree as a public service,
- without warranty and without liability for its behavior or performance.
- However, at the time of writing:
-
-
-
-
-
- The author of this statement, a volunteer on the
- PostgreSQL
- support team since November, 1996, is not aware of
- any problems in the PostgreSQL code base related
- to time transitions around Jan 1, 2000 (Y2K).
-
-
-
-
-
- The author of this statement is not aware of any reports of Y2K problems
- uncovered in regression testing
- or in other field use of recent or current versions
- of PostgreSQL. We might have expected
- to hear about problems if they existed, given the installed base and
- the active participation of users on the support mailing lists.
-
-
-
-
-
- To the best of the author's knowledge, the
- assumptions PostgreSQL
- makes about dates specified with a two-digit year
- are documented in the current User's Guide
- in the chapter on data types.
- For two-digit years, the significant transition year is 1970, not 2000;
- e.g. 70-01-01 is interpreted as 1970-01-01,
- whereas 69-01-01 is interpreted as 2069-01-01.
-
-
-
-
-
- Any Y2K problems in the underlying OS related to obtaining the
- current time may propagate into apparent Y2K problems in
- PostgreSQL.
-
-
-
-
-
- Refer to
- The GNU Project
- and
- The Perl Institute
- for further discussion of Y2K issues, particularly
- as it relates to open source, no fee software.
-
-
-
-
-
diff --git a/src/interfaces/jdbc/CHANGELOG b/src/interfaces/jdbc/CHANGELOG
deleted file mode 100644
index 863f8360ba9..00000000000
--- a/src/interfaces/jdbc/CHANGELOG
+++ /dev/null
@@ -1,467 +0,0 @@
-Tue Mar 06 12:05:00 GMT 2001 peter@retep.org.uk
- - Removed org.postgresql.xa.Test from the JDBC EE driver as it's an old
- test class and prevented it from compiling.
-
-Fri Mar 02 10:00:00 GMT 2001 peter@retep.org.uk
- - Fixed build.xml so that PGclob is not built in the JDBC1.2 driver
-
-
-Fri Feb 17 18:25:00 GMT 2001 peter@retep.org.uk
- - Removed the last deprecation warnings from the Java2 driver. Now only
- the old examples give deprecation warnings.
- - Added a new class into core that (JDK1.3+) ensures all connections are
- closed when the VM terminates.
-
-Fri Feb 17 15:11:00 GMT 2001 peter@retep.org.uk
- - Reduced the object overhead in PreparedStatement by reusing the same
- StringBuffer object throughout. Similarly SimpleDateFormat's are also
- reused in a thread-safe manner.
- - Implemented in PreparedStatement: setNull(), setDate/Time/Timestamp
- using Calendar, setBlob(), setCharacterStream()
- - Clob's are now implemented in ResultSet & PreparedStatement!
- - Implemented a lot of DatabaseMetaData & ResultSetMetaData methods.
- We have about 18 unimplemented methods left in JDBC2 at the current
- time.
-
-Wed Feb 14 17:29:00 GMT 2001 peter@retep.org.uk
- - Fixed bug in LargeObject & BlobOutputStream where the stream's output
- was not flushed when either the stream or the blob were closed.
- - Fixed PreparedStatement.setBinaryStream() where it ignored the length
-
-Tue Feb 13 16:33:00 GMT 2001 peter@retep.org.uk
- - More TestCases implemented. Refined the test suite api's.
- - Removed need for SimpleDateFormat in ResultSet.getDate() improving
- performance.
- - Rewrote ResultSet.getTime() so that it uses JDK api's better.
-
-Tue Feb 13 10:25:00 GMT 2001 peter@retep.org.uk
- - Added MiscTest to hold reported problems from users.
- - Fixed PGMoney.
- - JBuilder4/JDBCExplorer now works with Money fields. Patched Field &
- ResultSet (lots of methods) for this one. Also changed cash/money to
- return type DOUBLE not DECIMAL. This broke JBuilder as zero scale
- BigDecimal's can't have decimal places!
- - When a Statement is reused, the previous ResultSet is now closed.
- - Removed deprecated call in ResultSet.getTime()
-
-Thu Feb 08 18:53:00 GMT 2001 peter@retep.org.uk
- - Changed a couple of settings in DatabaseMetaData where 7.1 now
- supports those features
- - Implemented the DatabaseMetaData TestCase.
-
-Wed Feb 07 18:06:00 GMT 2001 peter@retep.org.uk
- - Added comment to Connection.isClosed() explaining why we deviate from
- the JDBC2 specification.
- - Fixed bug where the Isolation Level is lost while in autocommit mode.
- - Fixed bug where several calls to getTransactionIsolationLevel()
- returned the first call's result.
-
-Tue Feb 06 19:00:00 GMT 2001 peter@retep.org.uk
- - Completed first two TestCase's for the test suite. JUnit is now
- recognised by ant.
-
-Wed Jan 31 08:46:00 GMT 2001 peter@retep.org.uk
- - Some minor additions to Statement to make our own extensions more
- portable.
- - Statement.close() will now call ResultSet.close() rather than just
- disassociating with it.
-
-Tue Jan 30 22:24:00 GMT 2001 peter@retep.org.uk
- - Fixed bug where Statement.setMaxRows() was a global setting. Now
- limited to just itself.
- - Changed LargeObject.read(byte[],int,int) to return the actual number
- of bytes read (used to be void).
- - LargeObject now supports InputStream's!
- - PreparedStatement.setBinaryStream() now works!
- - ResultSet.getBinaryStream() now returns an InputStream that doesn't
- copy the blob into memory first!
- - Connection.isClosed() now tests to see if the connection is still alive
- rather than if it thinks it's alive.
-
-Thu Jan 25 09:11:00 GMT 2001 peter@retep.org.uk
- - Added an alternative constructor to PGSQLException so that debugging
- some more osteric bugs is easier. If only 1 arg is supplied and it's
- of type Exception, then that Exception's stacktrace is now included.
-
-Wed Jan 24 09:18:00 GMT 2001 peter@retep.org.uk
- - Removed the 8k limit by setting it to 64k
-
-Fri Jan 19 08:47:00 GMT 2001 peter@retep.org.uk
- - Applied patch submitted by John Schutz that
- fixed a bug with ANT's SQL functions (not needed for building but nice
- to have fixed).
-
-Thu Jan 18 17:30:00 GMT 2001 peter@retep.org.uk
- - Added new error message into errors.properties "postgresql.notsensitive"
- This is used by jdbc2.ResultSet when a method is called that should
- fetch the current value of a row from the database refreshRow() for
- example.
- - These methods no longer throw the not implemented but the new noupdate
- error. This is in preparation for the Updateable ResultSet support
- which will override these methods by extending the existing class to
- implement that functionality, but needed to show something other than
- notimplemented:
- moveToCurrentRow()
- moveToInsertRow()
- rowDeleted()
- rowInserted()
- all update*() methods, except those that took the column as a String
- as they were already implemented to convert the String to an int.
- - getFetchDirection() and setFetchDirection() now throws
- "postgresql.notimp" as we only support one direction.
- The CursorResultSet will override this when it's implemented.
- - Created a new class under jdbc2 UpdateableResultSet which extends
- ResultSet and overrides the relevant update methods.
- This allows us to implement them easily at a later date.
- - In jdbc2.Connection, the following methods are now implemented:
- createStatement(type,concurrency);
- getTypeMap();
- setTypeMap(Map);
- - The JDBC2 type mapping scheme almost complete, just needs SQLInput &
- SQLOutput to be implemented.
- - Removed some Statement methods that somehow appeared in Connection.
- - In jdbc2.Statement()
- getResultSetConcurrency()
- getResultSetType()
- setResultSetConcurrency()
- setResultSetType()
- - Finally removed the old 6.5.x driver.
-
-Thu Jan 18 12:24:00 GMT 2001 peter@retep.org.uk
- - These methods in org.postgresql.jdbc2.ResultSet are now implemented:
- getBigDecimal(int) ie: without a scale (why did this get missed?)
- getBlob(int)
- getCharacterStream(int)
- getConcurrency()
- getDate(int,Calendar)
- getFetchDirection()
- getFetchSize()
- getTime(int,Calendar)
- getTimestamp(int,Calendar)
- getType()
- NB: Where int represents the column name, the associated version
- taking a String were already implemented by calling the int
- version.
- - These methods no longer throw the not implemented but the new noupdate
- error. This is in preparation for the Updateable ResultSet support
- which will override these methods by extending the existing class to
- implement that functionality, but needed to show something other than
- notimplemented:
- cancelRowUpdates()
- deleteRow()
- - Added new error message into errors.properties "postgresql.noupdate"
- This is used by jdbc2.ResultSet when an update method is called and
- the ResultSet is not updateable. A new method notUpdateable() has been
- added to that class to throw this exception, keeping the binary size
- down.
- - Added new error message into errors.properties "postgresql.psqlnotimp"
- This is used instead of unimplemented when it's a feature in the
- backend that is preventing this method from being implemented.
- - Removed getKeysetSize() as it's not part of the ResultSet API
-
-Thu Jan 18 09:46:00 GMT 2001 peter@retep.org.uk
- - Applied modified patch from Richard Bullington-McGuire.
- I had to modify it as some of the code patched now exists in
- different classes, and some of it actually patched obsolete code.
-
-Wed Jan 17 10:19:00 GMT 2001 peter@retep.org.uk
- - Updated Implementation to include both ANT & JBuilder
- - Updated README to reflect the changes since 7.0
- - Created jdbc.jpr file which allows JBuilder to be used to edit the
- source. JBuilder _CAN_NOT_ be used to compile. You must use ANT for
- that. It's only to allow JBuilder's syntax checking to improve the
- driver's source. Refer to Implementation for more details.
-
-Wed Dec 20 16:19:00 GMT 2000 peter@retep.org.uk
- - Finished build.xml and updated Driver.java.in and buildDriver to
- match how Makefile and ANT operate.
-
-Tue Dec 19 17:30:00 GMT 2000 peter@retep.org.uk
- - Finally created ant build.xml file
-
-Mon Nov 20 08:12:00 GMT 2000 peter@retep.org.uk
- - Encoding patch to Connection by wrobell@posexperts.com.pl
-
-Tue Oct 17 15:35:00 BST 2000 petermount@maidstone.gov.uk
- - Changed getTimestamp() again. This time Michael Stephenson's
- solution looked far better than the original solution put in June.
-
-Tue Oct 10 13:12:00 BST 2000 peter@retep.org.uk
- - Updated DatabaseMetaData.supportsAlterTableWithDropColumn(), as
- PostgreSQL doesn't support dropping individual columns
- - Merged in some last patches. Only 1 left, which may not be compatible
- with jdbc1
- - Merged in my old retepsql project. Makefile now includes it.
-
-Mon Oct 02 12:30:00 BST 2000 peter@retep.org.uk
- - Merged in byte[] array allocation changes submitted by Gunnar Rønning
-
-
-Mon Sep 25 14:22:00 BST 2000 peter@retep.org.uk
- - Removed the DriverClass kludge. Now the org.postgresql.Driver class
- is compiled from a template file, and now has both the connection
- class (ie jdbc1/jdbc2) and the current version from Makefile.global
-
-Thu Jul 20 16:30:00 BST 2000 petermount@it.maidstone.gov.uk
- - Fixed DatabaseMetaData.getTableTypes()
-
-Tue Jun 06 12:00:00 BST 2000 petermount@it.maidstone.gov.uk
- - Added org/postgresql/DriverClass.java to the list of files removed
- by make clean (it's dynamically built)
- - Fixed Statement, so that the update count is valid when an SQL
- DELETE operation is done.
- - While fixing the update count, made it easier to get the OID of
- the last insert as well. Example is in example/basic.java
-
-Tue Jun 06 08:37:00 BST 2000 petermount@it.maidstone.gov.uk
- - Removed a hardwired 8K limit on query strings
- - Added some missing org.'s in Connection that prevented
- the use of the geometric types.
-
-Thu Jun 01 07:26:00 BST 2000 petermount@it.maidstone.gov.uk
- - Removed timezone in getTimestamp() methods in ResultSet.
-
-Mon May 15 22:30:00 BST 2000 peter@retep.org.uk
- - Fixed the message Makefile produces after compiling. It still said
- about the old Driver class, not the new package. Spotted by
- Joseph Shraibman
-
-Thu May 04 11:38:00 BST 2000 petermount@it.maidstone.gov.uk
- - Corrected incorrect date in CHANGELOG
- - Fixed the ImageViewer example
-
-Wed May 03 16:47:00 BST 2000 petermount@it.maidstone.gov.uk
- - Fixed the Makefile so that postgresql.jar is built every time
- the jdbc1 or jdbc2 rules are called.
- - Fixed the threadsafe example. It had problems with autocommit
-
-Wed May 03 14:32:00 BST 2000 petermount@it.maidstone.gov.uk
- - Rewrote the README file (the old one was 18 months old!)
- - Added @deprecated tags to org.postgresql.jdbc2.ResultSet
- to clear some warnings issued during compilation.
-
-Wed Apr 12 22:14:00 BST 2000 peter@retep.org.uk
- - Implemented the JDBC2 Blob interface, and ResultSet.getBlob().
-
-Wed Apr 12 20:20:00 BST 2000 peter@retep.org.uk
- - Fixed bug in ResultSet.absolute(). Negative rows are now supported.
- - Implemented ResultSet.relative(), afterLast().
-
-Tue Feb 1 21:40:00 GMT 2000 peter@retep.org.uk
- - Finally imported the contributed javax extensions by Assaf Arkin
- arkin@exoffice.com
-
-Mon Jan 24 21:00:00 GMT 2000 peter@retep.org.uk
- - Finally introduced the 7.0 additions to the core CVS repository.
- - All source files are now under the org.postgresql package (previously
- they were under postgresql). The package lines now changed
- accordingly.
- - The Makefile was rewritten so it should now work on machines that
- can't handle the $( ) syntax.
- - Dutch translation by Arnout Kuiper (ajkuiper@wxs.nl)
-
-Mon Sep 13 23:56:00 BST 1999 peter@retep.org.uk
- - PG_Stream.SendChar() optimised, increased default buffer size of
- output stream to 8k, and introduced an 8k buffer on the input stream.
- Submitted by Sverre H Huseby
- - Added a finalize() method to Connection class in both drivers so that
- the connection to the backend is really closed.
- - Due to many JVMs not returning a meaningful value for java.version,
- the decision for building the JDBC1.2 or JDBC2 driver is now a
- compile-time option.
- - Replaced $$(cmd...) with `cmd...` in the Makefile. This should allow
- the driver to compile when using shells other than Bash.
-
-Thu Sep 9 01:18:39 MEST 1999 jens@jens.de
- - fixed bug in handling of DECIMAL type
-
-Wed Aug 4 00:25:18 CEST 1999 jens@jens.de
- - updated ResultSetMetaData.getColumnDisplaySize() to return
- the actual display size
- - updated driver to use postgresql FE/BE-protocol version 2
-
-Mon Aug 2 03:29:35 CEST 1999 jens@jens.de
- - fixed bug in DatabaseMetaData.getPrimaryKeys()
-
-Sun Aug 1 18:05:42 CEST 1999 jens@jens.de
- - added support for getTransactionIsolation and setTransactionIsolation
-
-Sun Jun 27 12:00:00 BST 1999
- - Fixed typo in postgresql.Driver that prevented compilation
- - Implemented getTimestamp() fix submitted by Philipp Matthias Hahn
-
- - Cleaned up some comments in Connection
-
-Wed Jun 23 06:50:00 BST 1999
- - Fixed error in errors.properties where the arguments are 0 based not
- 1 based
- - Fixed bug in postgresql.Driver where exception is thrown, then
- intercepted rather than being passed to the calling application.
- - Removed the file postgresql/CallableStatement, as it's not used and
- really exists in the jdbc1 & jdbc2 sub packages only.
-
-Wed May 19 00:20:00 BST 1999
- - Internationalisation now done. Surprising that there are 68 error
- messages in the driver ;-)
-
-Tue May 18 07:00:00 BST 1999
- - Set the ImageViewer application to use transactions
-
-Tue May 18 00:00:00 BST 1999
- - Just after committing, I realised why internationalisation isn't
- working. This is now fixed (in the Makefile).
-
-Mon May 17 23:40:00 BST 1999
- - PG_Stream.close() now attempts to send the close connection message
- to the backend before closing the streams
- - Added batch support in the JDBC2 driver, supplied by Yutaka Tanida
-
- - Removed the old datestyle code. Now the driver uses only ISO.
- - Removed some files in the postgresql directory still in CVS that were
- moved since 6.4.x (DatabaseMetaData.java PreparedStatement.java
- ResultSetMetaData.java Statement.java)
- - Internationalisation of the error messages is partially implemented,
- however it's not enabled as it only works when the jar file is
- _not_ used, and work needs to be done.
-
-Sun Apr 11 17:00:00 BST 1999
- - getUpdateCount() now returns the actual update count (before it
- simply returned 1 for everything).
- - added some updates to example.basic so it would test the new update
- count code.
- - corrected typo in a comment in Statement.java
-
-Mon Jan 25 19:45:00 GMT 1999
- - created subfolders example/corba and example/corba/idl to hold the
- new example showing how to hook CORBA and PostgreSQL via JDBC
- - implemented some JDBC2 methods courtesy of Joachim.Gabler@t-online.de
-
-Sat Jan 23 10:30:00 GMT 1999
- - Changed imports in postgresql.jdbc1.ResultSetMetaData as for some
- reason it didn't want to compile under jdk1.1.6
-
-Tue Dec 29 15:45:00 GMT 1998
- - Refreshed the README (which was way out of date)
-
-Tue Dec 29 15:45:00 GMT 1998
- - Finished adding the additional methods into the JDBC2 driver.
- - Had to add some explicit package references for the JDK1.2 Javac to
- cope with the driver
-
-Tue Dec 29 12:40:00 GMT 1998
- - Fixed package imports and some references to java.sql.ResultSet in
- various files. Compiled and tested the JDBC1 driver.
-
-Mon Dec 28 19:01:37 GMT 1998
- - created a new package postgresql.jdbc2 which will contain the JDBC 2
- specific classes. A similar new package (postgresql.jdbc1) has been
- created to hold the JDBC 1 specific classes.
- - modified Makefile to allow compilation of the JDBC 1 & 2 drivers,
- with the possibility of building a dual-spec driver.
- - changed the version number in postgresql.Driver to 6.5
- - modified postgresql.Driver class to initiate the correct driver when
- used under a 1.1 or 1.2+ JVM.
- - postgresql.Connection and postgresql.jdbc2.Connection now extend the
- new class postgresql.ConnectionStub, which allows us to dynamically
- open the JDBC1 or JDBC2 drivers.
- - enabled compilation of the driver under Win32 when using the Make
- from the CygWin package (Cygnus B20.1 was used).
- - To make future development easier (now we have 2 specifications to
- work with) the following classes have moved from the postgresql to
- the postgresql.jdbc1 package:
- CallableStatement Connection
- DatabaseMetaData PreparedStatement
- ResultSet ResultSetMetaData
- Statement
- Some of these classes have common code that is not dependent on
- either JDBC specification. This common code is still in the
- postgresql package.
- Ie: postgresql.jdbc1.Connection extends postgresql.Connection
- and postgresql.jdbc2.Connection extends postgresql.Connection
-
-Wed Oct 7 22:00:00 BST 1998
- - removed synchronised from Connection.ExecSQL(). See next entry.
- - added new synchronised locking in the Connection.ExecSQL() and
- FastPath.fastpath() methods. They now lock against the PG_Stream
- object for the connection, which now provides full thread safety.
- - Reposted ChangeLog as it's missing from CVS.
-
-Modifications made between the 6.3.2 release and Sun Aug 30 11:33:06 BST 1998
-
- - Fixed PreparedStatement.setObject as it didn't handle shorts
- - ResultSet.getDate() now handles null dates (returns null rather
- than a NullPointerException)
- - ResultSetMetaData.getPrecision() now returns 0 for VARCHAR
- - Field now caches the typename->oid in a Hashtable to speed things
- up. It removes the need for some unnecessary queries to the backend.
- - PreparedStatement.toString() now returns the SQL statement that it
- will send to the backend. Before it did nothing.
- - DatabaseMetaData.getTypeInfo() now does something.
- - Connection now throws an exception if either the user or password
- property is missing, as they are required for JDBC to work.
- This occasionally occurs when the client uses the properties version
- of getConnection(), and is a common question on the email lists.
-
-Sun Aug 30 11:33:06 BST 1998
-
- - Created ChangeLog file, and entered stuff done since 6.3.2 and today
- - Changed version number to 6.4 in Driver.java
- - Added fix to DatabaseMetaData.getTables() submitted by
- Stefan Andreasen
- - Added fix to DatabaseMetaData.getColumns() to handle patterns
- submitted by Stefan Andreasen
- - Set TcpNoDelay on the connection, as this gives us a 10x speed
- improvement on FreeBSD (caused by a bug in their TCP Stack). They
- should fix the bug before 6.4 is released, but will keep this
- in here unless it causes more problems.
- Submitted by Jason Venner
- - Removed a duplicate definition of fieldCache
- - Added a more meaningful message when the connection is refused. It
- now says:
- Connection refused. Check that the hostname and port is
- correct, and that the postmaster is running with the -i flag,
- which enables TCP/IP networking.
- - Removed kludge in PreparedStatement.setDate() that acted as a
- temporary fix to a bug in SimpleDateFormat, as it broke date
- handling in JDK 1.1.6.
- - Modified PG_Stream and Connection, so that outbound data is now
- buffered. This should give us a speed improvement, and reduce the
- number of network packets generated.
- - Removed duplicate code and optimised PG_Stream.
- - PG_Stream now returns a more meaningful message when the connection
- is broken by the backend. It now returns:
- The backend has broken the connection. Possibly the action you
- have attempted has caused it to close.
- - Removed obsolete code from Connection.
- - The error message returned when the authentication scheme is unknown
- has been extended. It now reads:
- Authentication type ### not supported. Check that you have
- configured the pg_hba.conf file to include the client's IP
- address or Subnet, and is using a supported authentication
- scheme.
- - Connection.getMetaData() now caches the instance returned, so
- multiple calls will return the same instance.
- - Created a test application that tests the DatabaseMetaData and
- ResultSetMetaData classes.
- - Replaced getString(#).getBytes() with getBytes(#) which should speed
- things up, and reduce memory usage.
- - Optimised DatabaseMetaData.getProcedures(), and implemented patterns
- - Fixed NullPointerExceptions thrown when a field is null (Internal
- to the driver, not caused by results from the backend.
- DatabaseMetaData.getProcedures() is an example of a method that
- causes this):
- - ResultSetMetaData.getColumnName() now returns field# where
- # is the column number.
- - ResultSet.getObject() fixed
- - Fixed bug in psql example that was affected by null fields
- - DatabaseMetaData.getTables()
- - DatabaseMetaData.getPrimaryKeys() ran a query with an ambiguous field;
- fixed.
- - getTypeInfo() optimised to increase speed and reduce memory usage
- - ResultSetMetaData.isCurrency() optimised and is now smaller.
- - Removed unnecessary code from ResultSetMetaData.getCatalogName()
- and getSchemaName().
- - Created new class postgresql.util.PGmoney to map the money type
- - Created new class postgresql.geometric.PGline to map the line type
-
diff --git a/src/interfaces/jdbc/Implementation b/src/interfaces/jdbc/Implementation
deleted file mode 100644
index ecfc626d9d6..00000000000
--- a/src/interfaces/jdbc/Implementation
+++ /dev/null
@@ -1,199 +0,0 @@
-This short document is provided to help programmers through the internals of
-the PostgreSQL JDBC driver.
-
-Last update: January 17 2001 peter@retep.org.uk
-
-build.xml
----------
-
-As of 7.1, we now use the ANT build tool to build the driver. ANT is part of
-the Apache/Jakarta project, and provides far superior build capabilities. You
-can find ANT at http://jakarta.apache.org/ant/index.html and, being pure Java,
-it will run on any Java platform.
-
-So far I've tested it under JDK1.2.x & JDK1.3 (both Linux & NT) but not yet with
-JDK1.1.8. Because of that, the Makefile still works for now, but it should be
-gone by 7.2.
-
-Anyhow, to build, simply type ant and the .jar file will be created and put into
-the jars directory.
-
-Tip: If you run ant from the source root directory (ie: where the configure
-script is located) you will find another build.xml file. It is advised to run
-ant from that directory as it will then compile some auxiliary Java/JDBC
-utilities that are located under the /contrib/retep directory.
-
-Makefile
---------
-
-Prior to 7.1, all compilation had to be done using Make. This is because there
-are three versions of the driver, one for JDBC1 (for JDK 1.1.x) and the others
-for JDBC2 (for JDK 1.2 or later, one standard and one enterprise).
-
-As of 7.1, ANT is the build tool of choice. Just compare Makefile and build.xml
-to see why! Make just isn't suited to Java.
-
-Building with just the JDK
---------------------------
-
-This is not advised, simply because you have to make sure you include the
-correct classes, and because org.postgresql.Driver is built on the fly.
-Also, javac won't pick up all the classes because some (org.postgresql.geometric
-for example) are loaded dynamically.
-
-org/postgresql/Driver.java.in
------------------------------
-
-Because there are three versions of the driver, the org.postgresql.Driver class
-is built dynamically. To build it correctly, ANT copies the Driver.java.in file
-to Driver.java, replacing certain values according to the required driver.
-
-The replaced values are of the format %VALUE%, ie: %MAJORVERSION% is replaced
-with 7 in the 7.1 version of the driver.
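-
-Purely as a hypothetical illustration (these are not the real template lines),
-a Driver.java.in fragment along the lines of:
-
-    static final int MAJORVERSION = %MAJORVERSION%;
-
-would come out of a 7.1 build in the generated Driver.java as:
-
-    static final int MAJORVERSION = 7;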
-
-postgresql.jar
---------------
-
-This jar file is produced by ANT, and contains the driver for your JDK platform.
-
-If you downloaded a precompiled binary from the web, you may find that the
-jar file is named differently. These jars are identical to this file but are
-named according to the backend and JDK versions.
-
-The naming convention is of the form: jdbc-#.#-#.##.jar
-
-ie: for 7.1
- jdbc-7.1-1.1.jar JDBC Driver for JDK1.1.8
- jdbc-7.1-1.2.jar JDBC Driver for JDK1.2 & JDK1.3
- jdbc-7.1-1.2ent.jar JDBC Driver for JDK1.2 & JDK1.3 Enterprise Editions
-
-If in the future there are any 1.3 specific classes then there will be two new
-jar files.
-
-Note: All the precompiled binaries are built under Linux.
-
-jdbc.jpx
---------
-
-This is a JBuilder4 project file. It's here to allow JBuilder to be used to
-develop the driver, mainly for its editor features like syntax checking and
-auto-completion etc.
-
-IMPORTANT: You CAN NOT build the driver from within JBuilder. You must use ANT.
- This is because of the three versions of the JDK. If you try to use
- JBuilder, it will try to build everything, and it will just not work.
-
-Importing packages
-------------------
-
-In user code, you may have to import one or more packages, if and only if you
-are using the non-JDBC extensions (like Fastpath or LargeObject).
-
-DO NOT import the org.postgresql, org.postgresql.jdbc1 or org.postgresql.jdbc2 packages!
-
-Internally, some classes will import the packages when there is a link between
-them and the other packages. However, the above rule still applies. It's there
-because javac can become confused when similar class names are present in more
-than one place.
-
-However, there are places where they need to refer to classes in the
-org.postgresql package. In this case, import the individual classes, and not
-the entire package.
-
-ie: import org.postgresql.Field
-
-    NOT import org.postgresql.*
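-
-For instance, a minimal and purely illustrative sketch of user code that
-follows this rule (the database name, user and password are placeholders),
-importing only the individual extension classes it needs while everything else
-stays on the java.sql interfaces:
-
-    import java.sql.Connection;
-    import java.sql.DriverManager;
-    import org.postgresql.largeobject.LargeObject;        // individual class
-    import org.postgresql.largeobject.LargeObjectManager; // individual class
-
-    public class ImportExample
-    {
-        public static void main(String args[]) throws Exception
-        {
-            Class.forName("org.postgresql.Driver");
-            Connection con = DriverManager.getConnection(
-                "jdbc:postgresql:template1", "user", "password");
-            // ... use java.sql and the extension classes as normal ...
-            con.close();
-        }
-    }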
-
-Package Layout
---------------
-
-The driver is split into several packages:
-
-org.postgresql core classes that can be accessed by user code
-org.postgresql.core core classes not normally used externally
-org.postgresql.jdbc1 classes used only in implementing JDBC 1
-org.postgresql.jdbc2 classes used only in implementing JDBC 2
-org.postgresql.fastpath FastPath to backend functions
-org.postgresql.geometric 2D Geometric types mapped to Java Objects
-org.postgresql.largeobject Low level Large Object access
-org.postgresql.util Utility classes
-
-
-Package org.postgresql
-----------------------
-
-This package holds the core classes.
-
-Driver registers the driver when it's loaded, and determines which
- Connection class (in jdbc1 or jdbc2 packages) to use when
- connecting to a database.
-
-Field Used internally to represent a Field
-PG_Stream Used internally to manage the network stream.
-PostgresqlDataSource
- Exists in the Java2 Enterprise edition driver only and is the
- enterprise equivalent to Driver
-
- These classes contain common code that is not dependent on
- either of the two JDBC specifications.
-
-Connection Common code used in Connections, mainly Network Protocol stuff.
-ResultSet Common code used in ResultSets
-
-Package org.postgresql.core
----------------------------
-
-New in 7.1, this is where core classes (common to all versions) will exist. Any
-new class that would have gone into org.postgresql must go in here instead.
-
-BytePoolDim1 Handles a pool of byte[] arrays.
-BytePoolDim2 Handles a pool of byte[][] arrays
-MemoryPool Interface for managing MemoryPools. Not used (yet).
-ObjectPool Interface for an Object Pool
-SimpleObjectPool Class that implements ObjectPool and used by BytePoolDim#
-Encoding Character encoding logic, mainly for Connection and PG_Stream.
-
-Package org.postgresql.fastpath
--------------------------------
-
-Fastpath Handles executing a function on the PostgreSQL Backend
-FastpathArg Defines an argument for a function call
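-
-As a rough, illustrative sketch only (it assumes the getFastpathAPI() accessor
-on org.postgresql.Connection, the addFunctions() and getInteger() methods on
-Fastpath, and placeholder variables con and oid), calling a backend function
-by name looks roughly like this:
-
-    import java.sql.ResultSet;
-    import java.sql.Statement;
-    import org.postgresql.fastpath.Fastpath;
-    import org.postgresql.fastpath.FastpathArg;
-
-    // con is an assumed open java.sql.Connection to a PostgreSQL backend,
-    // oid is the assumed int OID of a large object to remove.
-    Fastpath fp = ((org.postgresql.Connection) con).getFastpathAPI();
-
-    // Fastpath works on function OIDs, so register the name/oid pair first.
-    Statement stmt = con.createStatement();
-    ResultSet rs = stmt.executeQuery(
-        "SELECT proname, oid FROM pg_proc WHERE proname = 'lo_unlink'");
-    fp.addFunctions(rs);
-    rs.close();
-
-    FastpathArg args[] = new FastpathArg[1];
-    args[0] = new FastpathArg(oid);
-    int result = fp.getInteger("lo_unlink", args);  // run it, get an int back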
-
-Package org.postgresql.geometric
---------------------------------
-
-PGbox Maps to postgresql type box
-PGcircle Maps to postgresql type circle
-PGline Maps to postgresql type line
-PGlseg Maps to postgresql type lseg
-PGpath Maps to postgresql type path
-PGpoint Maps to postgresql type point
-PGpolygon Maps to postgresql type polygon
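-
-A small, purely illustrative sketch (it assumes a java.sql.ResultSet rs
-positioned on a row whose first column is of the point type, and public x and
-y fields on PGpoint):
-
-    import org.postgresql.geometric.PGpoint;
-
-    PGpoint p = (PGpoint) rs.getObject(1);
-    double x = p.x;
-    double y = p.y;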
-
-Package org.postgresql.jdbc1
-----------------------------
-
-The classes in this package handle the JDBC 1 Specification, for JDK 1.1.x.
-All interfaces in the java.sql package are present here.
-
-Package org.postgresql.jdbc2
-----------------------------
-
-The classes in this package handle the JDBC 2 Specification, for JDK 1.2 or later.
-All interfaces in the java.sql, and javax.sql packages are present here.
-
-Package org.postgresql.largeobject
-----------------------------------
-
-LargeObject Represents an open LargeObject
-LargeObjectManager Handles the opening and deleting of LargeObjects
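-
-A rough sketch of typical usage (it assumes the getLargeObjectAPI() accessor
-on org.postgresql.Connection, the create/open/write methods shown, and that
-large objects are accessed inside a transaction; con is an assumed open
-java.sql.Connection):
-
-    import org.postgresql.largeobject.LargeObject;
-    import org.postgresql.largeobject.LargeObjectManager;
-
-    con.setAutoCommit(false);  // assumed: large objects need a transaction
-    LargeObjectManager lom =
-        ((org.postgresql.Connection) con).getLargeObjectAPI();
-
-    int oid = lom.create(LargeObjectManager.READ | LargeObjectManager.WRITE);
-    LargeObject lo = lom.open(oid, LargeObjectManager.WRITE);
-    lo.write(new byte[]{ 1, 2, 3 });
-    lo.close();
-    con.commit();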
-
-Package org.postgresql.util
----------------------------
-
-PGmoney Maps to postgresql type money
-PGobject Used to represent postgresql types that have no Java equivalent
-PGtokenizer Helper class for the geometric types
-Serialize Used to serialise Java objects into tables, rather than Blobs
-UnixCrypt Used to handle crypt authentication
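-
-For a backend type with no direct Java mapping, a minimal, illustrative sketch
-(it assumes a java.sql.ResultSet rs whose first column is such a type, and the
-getType() and getValue() accessors on PGobject):
-
-    import org.postgresql.util.PGobject;
-
-    PGobject obj = (PGobject) rs.getObject(1);
-    String type = obj.getType();    // the backend type name
-    String value = obj.getValue();  // the value as text, as the backend sent it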
-
diff --git a/src/interfaces/jdbc/jdbc.jpx b/src/interfaces/jdbc/jdbc.jpx
deleted file mode 100644
index 8c17267ca1b..00000000000
--- a/src/interfaces/jdbc/jdbc.jpx
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/interfaces/jdbc/utils/CheckVersion.java b/src/interfaces/jdbc/utils/CheckVersion.java
deleted file mode 100644
index a2438cd4f9f..00000000000
--- a/src/interfaces/jdbc/utils/CheckVersion.java
+++ /dev/null
@@ -1,74 +0,0 @@
-package utils;
-
-/*
- * This little app checks to see what version of JVM is being used.
- * It does this by checking first the java.vm.version property, and
- * if that fails, it looks for certain classes that should be present.
- */
-public class CheckVersion
-{
- /*
- * Check for the existence of a class by attempting to load it
- */
- public static boolean checkClass(String c)
- {
- try
- {
- Class.forName(c);
- }
- catch (Exception e)
- {
- return false;
- }
- return true;
- }
-
- /*
- * This first checks java.vm.version for 1.1, 1.2 or 1.3.
- *
- * It writes jdbc1 to stdout for the 1.1.x VM.
- *
- * For 1.2 or 1.3, it checks for the existence of the javax.sql.DataSource
- * interface, and if found writes enterprise to stdout. If the interface
- * is not found, it writes jdbc2 to stdout.
- *
- * PS: It also looks for the existence of java.lang.Byte which appeared in
- * JDK1.1.0 in case java.vm.version is not heeded by some JVMs.
- *
- * If it can't work it out, it writes huho to stdout.
- *
- * The make file uses the written results to determine which rule to run.
- *
- * Bugs: This needs thorough testing.
- */
- public static void main(String args[])
- {
- String vmversion = System.getProperty("java.vm.version");
-
- System.out.println("postgresql.jdbc=" + System.getProperty("postgresql.jdbc"));
-
- // We are running a 1.1 JVM
- if (vmversion != null && vmversion.startsWith("1.1"))
- {
- System.out.println("jdbc1");
- //System.exit(0);
- }
- else
- // We are running a 1.2 or 1.3 JVM
- if ((vmversion != null &&
- (vmversion.startsWith("1.2") ||
- vmversion.startsWith("1.3"))) ||
- checkClass("java.lang.Byte")
- )
- {
-
- // Check to see if we have the standard extensions. If so, then
- // we want the enterprise edition, otherwise the jdbc2 driver.
- if (checkClass("javax.sql.DataSource"))
- System.out.println("enterprise");
- else
- System.out.println("jdbc2");
- //System.exit(0);
- }
- System.setProperty("postgresql.jdbc", "yoyo");
- }
-}
diff --git a/src/interfaces/jdbc/utils/buildDriver b/src/interfaces/jdbc/utils/buildDriver
deleted file mode 100755
index 8cca1d9c36d..00000000000
--- a/src/interfaces/jdbc/utils/buildDriver
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/sh
-#
-# $Id: buildDriver,v 1.2 2000/12/20 16:22:49 peter Exp $
-#
-# This script generates the org/postgresql/Driver.java file from the template
-# org/postgresql/Driver.java.in
-#
-# We do this because we need to include the version number from Makefile.global
-# and some other goodies.
-#
-# This used to be in Makefile, but as it's now done three times, it's better
-# to have it as a separate script.
-#
-# If you have any problems, please let us know ;-)
-#
-# Syntax: buildDriver version class edition source
-#
-# Where:
-# version The version string from Makefile.global
-# class The class implementing java.sql.Connection
-# edition The driver edition being built
-# source The file to build. We assume that ${source}.in exists
-#
-
-VERSION=$1
-CLASS=$2
-EDITION=$3
-SOURCE=$4
-
-#---------------------------------------------------------------------------
-# Extract the version. This will work until version x.9 (and assuming we don't
-# have 7.10 etc). We only handle 1 digit for MINORVERSION to handle things like
-# 7.1devel etc
-#
-MAJORVERSION=`echo $VERSION | cut -f1 -d'.'`
-MINORVERSION=`echo $VERSION | cut -f2 -d'.' | cut -c1`
-
-#---------------------------------------------------------------------------
-# Now finally build the driver
-sed \
- -e "s/@JDBCCONNECTCLASS@/$CLASS/g" \
- -e "s/@VERSION@/$VERSION $EDITION/g" \
- -e "s/@MAJORVERSION@/$MAJORVERSION/g" \
- -e "s/@MINORVERSION@/$MINORVERSION/g" \
- <${SOURCE}.in \
- >$SOURCE
-#---------------------------------------------------------------------------
diff --git a/src/interfaces/jdbc/utils/changelog.pl b/src/interfaces/jdbc/utils/changelog.pl
deleted file mode 100644
index 3cba15aa91d..00000000000
--- a/src/interfaces/jdbc/utils/changelog.pl
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/perl
-
-while(<>) {
- chomp();
- s/\t+/ /g;
- if(substr($_,0,3) eq ' - ') {
- print "