mirror of https://github.com/postgres/postgres.git synced 2025-05-11 05:41:32 +03:00

This commit was manufactured by cvs2git to create branch 'REL7_3_STABLE'.

Sprout from master 2002-11-04 17:14:29 UTC Tom Lane <tgl@sss.pgh.pa.us> 'Remove extraneous semicolons after routine bodies.  These don't bother'
Cherrypick from master 2002-09-04 07:23:04 UTC Bruce Momjian <bruce@momjian.us> 'Brand 7.3.  Ready for beta!':
    contrib/xml/README
    contrib/retep/CHANGELOG
    contrib/retep/Implementation
    contrib/retep/Makefile
    contrib/retep/README
    contrib/retep/build.xml
    contrib/retep/data/cds.dtd
    contrib/retep/data/cds.xml
    contrib/retep/uk/org/retep/tools.properties
    contrib/retep/uk/org/retep/dtu/DCollection.java
    contrib/retep/uk/org/retep/dtu/DConstants.java
    contrib/xml/pgxml_dom.source
    contrib/retep/uk/org/retep/dtu/DElement.java
    contrib/retep/uk/org/retep/dtu/DEnvironment.java
    contrib/retep/uk/org/retep/dtu/DModule.java
    contrib/retep/uk/org/retep/dtu/DModuleXML.java
    contrib/retep/uk/org/retep/dtu/DNode.java
    contrib/retep/uk/org/retep/dtu/DProcessor.java
    contrib/retep/uk/org/retep/dtu/DTransform.java
    contrib/retep/uk/org/retep/tools/Tool.java
    contrib/retep/uk/org/retep/util/ExceptionDialog.java
    contrib/retep/uk/org/retep/util/Globals.java
    contrib/retep/uk/org/retep/util/Logger.java
    contrib/retep/uk/org/retep/util/Main.java
    contrib/retep/uk/org/retep/util/StandaloneApp.java
    contrib/retep/uk/org/retep/util/hba/Editor.java
    contrib/retep/uk/org/retep/util/misc/IPAddress.java
    contrib/retep/uk/org/retep/util/misc/PropertiesIO.java
    contrib/retep/uk/org/retep/util/misc/WStringTokenizer.java
    contrib/retep/uk/org/retep/util/models/HBATableModel.java
    contrib/retep/uk/org/retep/util/models/PropertiesTableModel.java
    contrib/retep/uk/org/retep/util/proped/PropertyEditor.java
    contrib/retep/uk/org/retep/xml/core/XMLFactory.java
    contrib/retep/uk/org/retep/xml/core/XMLFactoryException.java
    contrib/retep/uk/org/retep/xml/jdbc/XMLDatabase.java
    contrib/retep/uk/org/retep/xml/jdbc/XMLResultSet.java
    contrib/retep/uk/org/retep/xml/parser/TagListener.java
    contrib/retep/uk/org/retep/xml/test/XMLExport.java
    doc/src/sgml/libpgeasy.sgml
    doc/src/sgml/odbc.sgml
    contrib/xml/pgxml.source
    doc/src/sgml/recovery.sgml
    src/test/regress/expected/geometry-bsdi-precision.out
    contrib/retep/uk/org/retep/xml/parser/TagHandler.java
    doc/src/sgml/version.sgml
    doc/src/sgml/y2k.sgml
    contrib/retep/retep.jpx
    src/interfaces/jdbc/utils/CheckVersion.java
    src/interfaces/jdbc/utils/changelog.pl
    contrib/retep/uk/org/retep/util/hba/Main.java
    contrib/retep/uk/org/retep/util/hba/Record.java
    contrib/retep/uk/org/retep/util/proped/Main.java
    src/interfaces/jdbc/CHANGELOG
    src/interfaces/jdbc/Implementation
    src/interfaces/jdbc/utils/buildDriver
    src/interfaces/jdbc/jdbc.jpx
CVS to git conversion script 2002-11-04 17:14:30 +00:00
parent 3f435f9e99
commit 503b41f6e5
56 changed files with 10592 additions and 0 deletions

contrib/retep/CHANGELOG (new file)
@@ -0,0 +1,7 @@
Fri Mar 02 16:08:00 GMT 2001 peter@retep.org.uk
- Started importing in the rest of the retep tools.
Tue Jan 23 10:19:00 GMT 2001 peter@retep.org.uk
- Finished the XML Export classes
- First of the test data suite now in CVS.

contrib/retep/Implementation (new file)
@@ -0,0 +1,116 @@
Retep Tools Implementation
--------------------------
The tools are designed to be put into a single jar file, but each one is
executable either individually or as part of one single application.
To run the big application, you can either run:
    java -jar retepTools.jar
or, with retepTools.jar in the classpath, run:
    java uk.org.retep.tools.Main
Windows users: you can also double-click retepTools.jar, as Windows will
launch it with javaw for you.
To run the individual tools, you must have the .jar file in your classpath and
then run the relevant Main class.
Tool                          Type     Class
------------------------------------------------------------------------------
pg_hba.conf Editor/repairer   Editor   uk.org.retep.util.hba.Main
Properties Editor             Editor   uk.org.retep.util.proped.Main
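For example, with retepTools.jar on the classpath, the standalone Properties
Editor can be started with:
    java uk.org.retep.util.proped.Main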
Layout of the classes
---------------------
Simply put, tools that work on property files (Java properties, resource files,
configuration settings - pg_hba.conf for example) go under uk.org.retep.util in
their own package. Other utility classes (like PropertiesIO) go into the
uk.org.retep.util.misc package, except for certain ones that are related,
e.g. TableModels. In Swing you have JTable, which uses a TableModel to display
(and possibly update) some data. These go under uk.org.retep.util.models, where
you will find PropertiesTableModel for example. This one allows a Properties
object to be displayed & updated.
Some core classes like Logger, ExceptionDialog etc go into the main
uk.org.retep.util package.
Directory/Package            Contents
------------------------------------------------------------------------------
uk.org.retep                 Home of the tools.properties file
uk.org.retep.tools           The main all-in-one application
uk.org.retep.dtu             The Data Transform Unit
uk.org.retep.util            Core utility classes
uk.org.retep.util.hba        pg_hba.conf editor/repairer
uk.org.retep.util.misc       Misc utility classes
uk.org.retep.util.models     Swing table models
uk.org.retep.util.proped     Property Editor
uk.org.retep.xml.core        Basic XML Factory
uk.org.retep.xml.jdbc        JDBC/XML interface
uk.org.retep.xml.parser      Simple SAX parser
Structure of a tool
-------------------
Each tool has at least 2 base classes, and an entry in the tools.properties
file. For this example, I'll show you the Properties Editor:
Base package       uk.org.retep.util.proped
Main tool class    uk.org.retep.util.proped.PropertyEditor
Standalone class   uk.org.retep.util.proped.Main
The main tool class is the entry point used by the main application. Because
it is used in a GUI, this class must extend javax.swing.JComponent and
implement the uk.org.retep.tools.Tool interface. (NB: You will find I always
use JPanel, but JComponent is required here so that any Swing class can be
used; you are not limited to JPanel.)
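For illustration only, a minimal main tool class might look like the sketch
below. ExampleTool is not part of this commit; it simply satisfies the Tool
interface included later in the commit:

import uk.org.retep.tools.Tool;
import javax.swing.JMenuBar;
import javax.swing.JPanel;

// Illustrative sketch: a main tool class extends a Swing component (JPanel
// here) and implements Tool so the all-in-one application or StandaloneApp
// can host it.
public class ExampleTool extends JPanel implements Tool
{
    private boolean standalone = false;

    public JMenuBar getMenuBar()
    {
        return null;                // this tool contributes no menu bar
    }

    public String getTitle()
    {
        return "Example Tool";      // used for the frame's title bar
    }

    public void setStandaloneMode(boolean aMode)
    {
        standalone = aMode;         // set by StandaloneApp when run standalone
    }
}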
The standalone class is a small class that implements the static main method.
It should extend the uk.org.retep.util.StandaloneApp class and be written along
the lines of the following example:
import uk.org.retep.util.StandaloneApp;
import javax.swing.JComponent;

public class Main extends StandaloneApp
{
    public Main(String[] args)
        throws Exception
    {
        super(args);
    }

    public JComponent init()
        throws Exception
    {
        // Your initialisation here. In this case the PropertyEditor
        PropertyEditor panel = new PropertyEditor();

        // do stuff here, ie load a file if supplied

        // return the tool
        return panel;
    }

    public static void main(String[] args)
        throws Exception
    {
        Main main = new Main(args);
        main.pack();
        main.setVisible(true);
    }
}
You will find a template in the uk.org.retep.util.Main class. Simply copy this
class's source, as it gives you the basic stub. Just add your own implementation
of init() like the one above. Look at the full Main class for the
PropertyEditor to see how to get at the command line args.
By convention, the standalone class is named Main.

contrib/retep/Makefile (new file)
@@ -0,0 +1,30 @@
#-------------------------------------------------------------------------
#
# Makefile for contributed retep tools
#
# Copyright (c) 2001, PostgreSQL Global Development Group
#
# $Header: /cvsroot/pgsql/contrib/retep/Attic/Makefile,v 1.1 2001/07/06 23:07:20 petere Exp $
#
#-------------------------------------------------------------------------
subdir = contrib/retep
top_builddir = ../..
include $(top_builddir)/src/Makefile.global
all:
	$(ANT) -buildfile $(srcdir)/build.xml all
install: installdirs
	$(ANT) -buildfile $(srcdir)/build.xml install \
		-Dinstall.directory=$(javadir)
installdirs:
	$(mkinstalldirs) $(javadir)
uninstall:
	$(ANT) -buildfile $(srcdir)/build.xml uninstall \
		-Dinstall.directory=$(javadir)
clean distclean maintainer-clean:
	$(ANT) -buildfile $(srcdir)/build.xml clean

contrib/retep/README (new file)
@@ -0,0 +1,35 @@
Before you ask what retepTools are, they are my personal suite of utilities.
About 90% of them are JDBC related (either they use JDBC, or I use them in
developing the JDBC driver).
Now, because of various reasons I won't go into now, in January 2001 I decided
to release the entire lot to the public. I could have used something like
SourceForge, but as they are mainly JDBC related I thought here is the best
place.
Now all (bar retepPDF, see end-note) will over the next few months be going
into the /contrib/retep directory. They range from simple XML Import/Export
classes to entire sub-systems that can be plugged into applications.
All this lot were never released, so I'm placing them under PostgreSQL's
licence.
Please refer to Implementation for details of what package does what.
It all requires Java2SE (JDK1.2) as a minimum. I do have some plans for some
EJB tools later, so those will need Java2EE, but not yet ;-)
Peter Mount
peter@retep.org.uk
March 2 2001
retepPDF: This is not included for two reasons:
1: It's big and not really related in any way to PostgreSQL
2: More importantly, I (maybe foolishly) released it some 3 years ago under
the LGPL. As a few people have added to it, it's not really possible to
change the licence, and I don't want to pollute PostgreSQL's source tree ;-)
retepGraph: This was an old graphics library. It's been obsolete for 3 years
now, so it's not going in.

contrib/retep/build.xml (new file)
@@ -0,0 +1,98 @@
<?xml version="1.0"?>
<!--
build file to build the donated retep tools packages
$Header: /cvsroot/pgsql/contrib/retep/Attic/build.xml,v 1.8 2001/07/06 23:07:20 petere Exp $
-->
<!DOCTYPE project [
<!ENTITY jarname "retepTools.jar">
]>
<project name="retep" default="all" basedir=".">
<!-- set global properties for this build -->
<property name="srcdir" value="." />
<property name="builddir" value="build" />
<property name="package" value="uk/org/retep" />
<property name="jardir" value="jars" />
<!-- Some checks used to build dependent on the environment -->
<target name="checks">
<available property="jdk1.2+" classname="java.lang.ThreadLocal" />
<available property="jdk1.3+" classname="java.lang.StrictMath" />
<available property="jdk1.2e+" classname="javax.sql.DataSource" />
<available property="xml" classname="org.xml.sax.Parser" />
</target>
<target name="warning" depends="checks" unless="jdk1.2+">
<echo>
*** WARNING: Contributed retep tools need jdk1.2 or later.
*** Compilation NOT done
</echo>
</target>
<!-- default target -->
<target name="all">
<antcall target="jar" />
</target>
<!-- Builds the various jar files -->
<target name="jar" depends="compile">
<jar jarfile="${jardir}/&jarname;" whenempty="fail">
<fileset dir="${builddir}">
<include name="**/*.class" />
</fileset>
<fileset dir="${srcdir}">
<include name="**/*.properties" />
</fileset>
</jar>
</target>
<!-- Builds the XML Tools -->
<target name="compile" depends="checks,prepare,warning" if="jdk1.2+">
<javac srcdir="${srcdir}" destdir="${builddir}">
<include name="${package}/**" />
<exclude name="${package}/**" unless="jdk1.2+" />
</javac>
</target>
<!-- Prepares the build by creating a directory to place the class files -->
<target name="prepare">
<mkdir dir="${builddir}" />
<mkdir dir="${jardir}" />
</target>
<target name="install" depends="all" if="install.directory">
<copy todir="${install.directory}" overwrite="true" filtering="off">
<fileset dir="${jardir}">
<include name="&jarname;" />
</fileset>
</copy>
</target>
<target name="uninstall" if="install.directory">
<delete>
<fileset dir="${install.directory}">
<include name="&jarname;" />
</fileset>
</delete>
</target>
<!-- This target removes any class files from the build directory -->
<target name="clean">
<delete quiet="true" dir="${builddir}" />
<delete quiet="true" dir="${jardir}" />
</target>
</project>

contrib/retep/data/cds.dtd (new file)
@@ -0,0 +1,16 @@
<!ELEMENT album (track*)+>
<!ATTLIST album
title CDATA #IMPLIED
aid CDATA #IMPLIED
>
<!ELEMENT catalogue (group)>
<!ELEMENT group (album*)>
<!ATTLIST group
name CDATA #IMPLIED
>
<!ELEMENT track (#PCDATA)>
<!ATTLIST track
tid CDATA #IMPLIED
id CDATA #IMPLIED
>

contrib/retep/data/cds.xml (new file, 2691 lines)
File diff suppressed because it is too large.

contrib/retep/retep.jpx (new file)
@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--JBuilder XML Project-->
<project>
<property category="idl" name="ProcessIDL" value="false" />
<property category="runtime.0" name="RunnableType" value="com.borland.jbuilder.runtime.ApplicationRunner" />
<property category="runtime.0" name="application.class" value="uk.org.retep.util.hba.Main" />
<property category="runtime.0" name="application.parameters" value="-d2 pg_hba.conf" />
<property category="runtime.0" name="appserver.ejbJarsSaved" value="1" />
<property category="runtime.0" name="appserver.parameters" value="-jts -jns -jss -jdb" />
<property category="runtime.0" name="appserver.servername" value="ejbcontainer" />
<property category="runtime.0" name="appserver.vmparameters" value="" />
<property category="runtime.0" name="jsprunner.docbase" value="." />
<property category="runtime.0" name="jsprunner.jspfile" value="E%|/docs/java/xml/example6" />
<property category="sys" name="AuthorLabel" value="@author" />
<property category="sys" name="BackupPath" value="bak" />
<property category="sys" name="BeansInstantiate" value="false" />
<property category="sys" name="BraceStyle" value="1" />
<property category="sys" name="CheckStable" value="1" />
<property category="sys" name="Company" value="" />
<property category="sys" name="CompanyLabel" value="Company:" />
<property category="sys" name="Copyright" value="Copyright (c) 2001" />
<property category="sys" name="CopyrightLabel" value="Copyright:" />
<property category="sys" name="DefaultPackage" value="uk.org.retep.util.misc" />
<property category="sys" name="Description" value="" />
<property category="sys" name="DescriptionLabel" value="Description:" />
<property category="sys" name="DocPath" value="doc" />
<property category="sys" name="EventMatch" value="false" />
<property category="sys" name="EventStyle" value="1" />
<property category="sys" name="ExcludeClassEnabled" value="0" />
<property category="sys" name="InstanceVisibility" value="0" />
<property category="sys" name="JDK" value="java 1.3.0-C" />
<property category="sys" name="LastTag" value="0" />
<property category="sys" name="Libraries" value="JAXP;Oracle JDBC;JDK1.3 JRE" />
<property category="sys" name="MakeStable" value="0" />
<property category="sys" name="OutPath" value="build" />
<property category="sys" name="SourcePath" value="." />
<property category="sys" name="Title" value="" />
<property category="sys" name="TitleLabel" value="Title:" />
<property category="sys" name="Version" value="1.0" />
<property category="sys" name="VersionLabel" value="@version" />
<property category="sys" name="WorkingDirectory" value="." />
<node type="Package" name="uk.org.retep.dtu" />
<node type="Package" name="uk.org.retep.tools" />
<node type="Package" name="uk.org.retep.util" />
<node type="Package" name="uk.org.retep.xml.core" />
<node type="Package" name="uk.org.retep.xml.jdbc" />
<node type="Package" name="uk.org.retep.xml.parser" />
<file path="build.xml" />
<file path="CHANGELOG" />
<file path="Implementation" />
<file path="uk/org/retep/util/models/PropertiesTableModel.java" />
<file path="README" />
</project>

contrib/retep/uk/org/retep/dtu/DCollection.java (new file)
@@ -0,0 +1,228 @@
package uk.org.retep.dtu;
import uk.org.retep.xml.core.XMLFactory;
import uk.org.retep.xml.core.XMLFactoryException;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
public class DCollection implements Collection
{
protected int num,max,inc;
protected DElement elements[];
public DCollection()
{
this(10);
}
public DCollection(int aIncrement)
{
num=0;
max=0;
inc=aIncrement;
elements=null;
}
protected void resize()
{
if(num>=max) {
max+=inc;
DElement n[] = new DElement[max];
if(elements!=null) {
System.arraycopy(elements,0,n,0,elements.length);
}
elements=n;
}
}
public int size()
{
return num;
}
public boolean isEmpty()
{
return (num==0);
}
/**
* Checks the list using its XML id.
*/
public synchronized boolean contains(Object parm1)
{
if(parm1 instanceof DElement) {
DElement e = (DElement) parm1;
int ei = e.getID();
// out of range?
if(ei<0 || ei>=num)
return false;
return elements[ei].equals(e);
}
return false;
}
public Iterator iterator()
{
return new iterator(this);
}
/**
* Inner class to implement an Iterator
*/
protected class iterator implements Iterator
{
protected DCollection c;
protected int i;
public iterator(DCollection aCollection)
{
c=aCollection;
i=0;
}
public boolean hasNext()
{
return i<c.size();
}
public Object next() {
return c.getElement(i++);
}
public void remove() {
}
}
public synchronized Object[] toArray()
{
Object o[] = new Object[num];
System.arraycopy(elements,0,o,0,num);
return o;
}
public Object[] toArray(Object[] parm1)
{
/**@todo: Implement this java.util.Collection method*/
throw new java.lang.UnsupportedOperationException("Method toArray() not yet implemented.");
}
/**
* Adds a node to the Collection, and sets its ID to its position in the Collection
*/
public synchronized boolean add(Object parm1)
{
if(parm1 instanceof DElement) {
DElement e = (DElement) parm1;
// Do nothing if it's already in a Collection
if(e.getID()>-1) {
return false;
}
// Add to the Collection
resize();
e.setID(num);
elements[num++] = e;
return true;
}
return false;
}
public synchronized boolean remove(Object parm1)
{
if(parm1 instanceof DElement) {
DElement e = (DElement) parm1;
int ei = e.getID();
if(ei<0 || ei>=num)
return false;
// Mark the node as parentless
e.setID(-1);
// Now remove from the array by moving latter nodes, fixing their ids
// in the process
for(int j=ei,k=ei+1;k<num;j++,k++) {
elements[j]=elements[k];
elements[j].setID(j);
}
num--;
return true;
}
return false;
}
public boolean containsAll(Collection parm1)
{
/**@todo: Implement this java.util.Collection method*/
throw new java.lang.UnsupportedOperationException("Method containsAll() not yet implemented.");
}
public boolean addAll(Collection parm1)
{
/**@todo: Implement this java.util.Collection method*/
throw new java.lang.UnsupportedOperationException("Method addAll() not yet implemented.");
}
public boolean removeAll(Collection parm1)
{
/**@todo: Implement this java.util.Collection method*/
throw new java.lang.UnsupportedOperationException("Method removeAll() not yet implemented.");
}
public boolean retainAll(Collection parm1)
{
/**@todo: Implement this java.util.Collection method*/
throw new java.lang.UnsupportedOperationException("Method retainAll() not yet implemented.");
}
public synchronized void clear()
{
// Mark each node as parentless
for(int i=0;i<num;i++) {
elements[i].setID(-1);
}
// dispose the array
num=0;
max=0;
elements=null;
}
/**
* Returns the element with supplied id.
* @return element or null
*/
public synchronized DElement getElement(int id)
{
if(id<0 || id>=num)
return null;
return elements[id];
}
/**
* Repairs the collection, ensuring all id's are correct
*/
public synchronized void repair()
{
for(int i=0;i<num;i++) {
elements[i].setID(i);
}
}
public synchronized void saveXML(XMLFactory aFactory)
throws IOException, XMLFactoryException
{
for(int i=0;i<num;i++) {
elements[i].saveXML(aFactory);
}
}
}

contrib/retep/uk/org/retep/dtu/DConstants.java (new file)
@@ -0,0 +1,43 @@
package uk.org.retep.dtu;
public class DConstants
{
/**
* A global version number
*/
public static final String XML_VERSION_ID = "V7.1-2001-02-26";
/**
* XML Tag names
*/
public static final String XML_DISPLAYNAME= "DISPLAYNAME";
public static final String XML_FROM = "FROM";
public static final String XML_ID = "ID";
public static final String XML_MODULE = "MODULE";
public static final String XML_NODE = "NODE";
public static final String XML_TO = "TO";
public static final String XML_TRANSFORM = "TRANSFORM";
public static final String XML_TYPE = "TYPE";
public static final String XML_VERSION = "VERSION";
public static final String XML_X = "X";
public static final String XML_Y = "Y";
public static final int NOP = 0; // No operation or always run transform
public static final int SUCCESS = 1; // Run transform only if DNode.OK
public static final int ERROR = 2; // Run transform only if DNode.ERROR
/**
* Node types 20-39 reserved for Transformation types
*/
public static final int TRANSFORMBASE = 20;
/**
* Node types 20-99 reserved for Internal Node implementations
*/
public static final int INTERNALBASE = 50;
/**
* Node types 100+ are for user extensions
*/
public static final int USERBASE = 100;
}

contrib/retep/uk/org/retep/dtu/DElement.java (new file)
@@ -0,0 +1,31 @@
package uk.org.retep.dtu;
import uk.org.retep.xml.core.XMLFactory;
import uk.org.retep.xml.core.XMLFactoryException;
import java.io.IOException;
public interface DElement
{
/**
* Fetch the unique ID of this Element
*/
public int getID();
/**
* Sets the unique id - normally set by DCollection
*/
public void setID(int id);
/**
* @return the type of the Element
*/
public int getType();
/**
* Set's the element type
*/
public void setType(int aType);
public void saveXML(XMLFactory aFactory) throws IOException, XMLFactoryException;
}

contrib/retep/uk/org/retep/dtu/DEnvironment.java (new file)
@@ -0,0 +1,30 @@
package uk.org.retep.dtu;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
public class DEnvironment
{
protected HashMap dsrc;
public DEnvironment()
{
dsrc=new HashMap();
}
public void addDataSource(String aKey,Object aObject)
{
dsrc.put(aKey,aObject);
}
public Object getDataSource(String aKey)
{
return dsrc.get(aKey);
}
public Iterator getDataSources()
{
return dsrc.values().iterator();
}
}

contrib/retep/uk/org/retep/dtu/DModule.java (new file)
@@ -0,0 +1,97 @@
package uk.org.retep.dtu;
import uk.org.retep.xml.core.XMLFactory;
import uk.org.retep.xml.core.XMLFactoryException;
import uk.org.retep.xml.parser.TagListener;
import uk.org.retep.util.Logger;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
/**
* DModule represents a programmatic module of steps used within the DTU
*/
public class DModule implements Serializable
{
// The nodes and transitions between them
protected DCollection nodes;
protected String displayName;
public static final String DEFAULT_DISPLAYNAME = "unnamed module";
public DModule()
{
nodes=new DCollection();
displayName=DEFAULT_DISPLAYNAME;
Logger.log(Logger.DEBUG,"new DModule",this);
}
// Expensive!
public DNode getNode(int id)
{
return (DNode) nodes.getElement(id);
}
public DNode addNode(DNode aNode)
{
Logger.log(Logger.DEBUG,"DModule.addNode",aNode);
nodes.add(aNode);
return aNode;
}
public void removeNode(DNode aNode)
{
Logger.log(Logger.DEBUG,"DModule.removeNode",aNode);
nodes.remove(aNode);
}
public void clear()
{
Logger.log(Logger.DEBUG,"DModule.clear",this);
nodes.clear();
}
public void setDisplayName(String aName)
{
Logger.log(Logger.DEBUG,"DModule.setDisplayName",aName);
displayName = aName;
}
public String getDisplayName()
{
return displayName;
}
public Iterator iterator()
{
return nodes.iterator();
}
/**
* Writes an XML representation of this module to an XMLFactory. The caller
* must close the factory after use!
*/
public synchronized void saveXML(XMLFactory aFactory)
throws IOException, XMLFactoryException
{
Logger.log(Logger.DEBUG,"DModule.saveXML start",this);
Iterator it;
aFactory.startTag(DConstants.XML_MODULE);
aFactory.addAttribute(DConstants.XML_DISPLAYNAME,displayName);
aFactory.addAttribute(DConstants.XML_VERSION,DConstants.XML_VERSION_ID);
// The nodes
nodes.saveXML(aFactory);
// The transforms
//trans.saveXML(aFactory);
aFactory.endTag(); // MODULE
Logger.log(Logger.DEBUG,"DModule.saveXML end",this);
}
}

contrib/retep/uk/org/retep/dtu/DModuleXML.java (new file)
@@ -0,0 +1,233 @@
package uk.org.retep.dtu;
import uk.org.retep.xml.core.XMLFactory;
import uk.org.retep.xml.core.XMLFactoryException;
import uk.org.retep.xml.parser.TagHandler;
import uk.org.retep.xml.parser.TagListener;
import uk.org.retep.util.Logger;
import java.io.CharArrayWriter;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.ArrayList;
import org.xml.sax.InputSource;
import org.xml.sax.Parser;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
public class DModuleXML implements TagListener
{
protected TagHandler handler;
protected DModule module = null;
protected DNode node = null;
protected DTransform trans = null;
protected ArrayList txmap;
public DModuleXML()
{
handler = new TagHandler();
handler.addTagListener(this);
txmap = new ArrayList();
Logger.log(Logger.DEBUG,"DModuleXML initialised");
}
public TagHandler getTagHandler()
{
return handler;
}
/**
* Used to optimise the switch handling in tagStart.
*
* The values of each T_* constant must match the corresponding element no
* in the tags static array.
*/
private static final int T_DEFAULT=-1;
private static final int T_MODULE =0;
private static final int T_NODE =1;
private static final int T_TRANS =2;
private static final String tags[] = {
DConstants.XML_MODULE,
DConstants.XML_NODE,
DConstants.XML_TRANSFORM
};
/**
* This is called when a tag has just been started.
* <p><b>NB:</b> args is volatile, so if you use it beyond the lifetime of
* this call, then you must make a copy of the HashMap (and not simply
* store this HashMap).
* @param level The number of tags above this
* @param tag The tag name
* @param args A HashMap of any arguments
*/
public void tagStart(int level,String tag,HashMap args)
{
Logger.log(Logger.DEBUG,"DModuleXML.tagStart",tag);
// Prefetch some common attributes
String sType = (String) args.get(DConstants.XML_TYPE);
String sX = (String) args.get(DConstants.XML_X);
String sY = (String) args.get(DConstants.XML_Y);
int type=-1,x=-1,y=-1;
if(sType!=null) {
type = Integer.parseInt(sType);
}
if(sX!=null) {
x = Integer.parseInt(sX);
}
if(sY!=null) {
y = Integer.parseInt(sY);
}
// Match the tag against the tags array (used for switch() )
int tagID=T_DEFAULT;
for(int i=0;i<tags.length;i++) {
if(tag.equals(tags[i])) {
tagID=i;
}
}
switch(tagID)
{
// The main module tag
case T_MODULE:
module = new DModule();
String sDisplayName = (String) args.get(DConstants.XML_DISPLAYNAME);
if(sDisplayName!=null) {
module.setDisplayName(sDisplayName);
}
break;
// Basic nodes
case T_NODE:
node = new DNode();
node.setType(type);
module.addNode(node);
break;
// Basic transforms
case T_TRANS:
trans = new DTransform();
trans.setType(type);
// When finished we fix the transforms
int to = Integer.parseInt((String) args.get(DConstants.XML_TO));
txmap.add(new tx(node,trans,to));
break;
default:
// ignore unknown tags for now
break;
}
}
protected class tx
{
public DNode node;
public DTransform transform;
public int toID;
public tx(DNode aNode,DTransform aTransform,int aID)
{
node=aNode;
transform=aTransform;
toID=aID;
}
}
/**
* This method is called by ContHandler to process a tag once it has been
* fully processed.
* <p><b>NB:</b> content is volatile, so you must copy its contents if you use
* it beyond the lifetime of this call.
* @param content CharArrayWriter containing the content of the tag.
*/
public void tagContent(CharArrayWriter content)
{
// Ignore
}
public void fixTransforms()
{
DNode to;
Iterator it = txmap.iterator();
while(it.hasNext()) {
tx x = (tx) it.next();
//Logger.log(Logger.DEBUG,"Fixing transform "+x.toID,x.transform,Integer.toString(x.node.getID()),Integer.toString(module.getNode(x.toID).getID()));
to = module.getNode(x.toID);
x.transform.setFrom(x.node);
x.transform.setTo(to);
//to.setFrom(x.transform);
}
}
/**
* Parse an InputSource and return the contained module.
* @return DModule loaded, null if the xml file does not contain a module.
*/
public DModule parse(InputSource is)
throws IOException,SAXException
{
getTagHandler().parse(is);
fixTransforms();
return module;
}
/**
* Parse an uri and return the contained module.
* @return DModule loaded, null if the xml file does not contain a module.
*/
public DModule parse(String uri)
throws IOException,SAXException
{
getTagHandler().parse(uri);
fixTransforms();
return module;
}
/**
* Debug test - read xml from one file and save to another.
*/
public static void main(String args[]) throws Exception
{
if(args.length!=2) {
System.err.println("Syntax: java DModuleXML in-file out-file");
System.exit(1);
}
Logger.setLevel(Logger.DEBUG);
Logger.log(Logger.INFO,"DModuleXML Read test1.xml");
DModuleXML dm = new DModuleXML();
DModule module = dm.parse(new InputSource(new FileInputStream(args[0])));
Logger.log(Logger.INFO,"Parse complete");
Logger.log(Logger.INFO,"DModuleXML Write XML");
FileWriter fw = new FileWriter(args[1]);
module.saveXML(new XMLFactory(fw));
fw.close();
Logger.log(Logger.INFO,"Write complete");
DProcessor.run(module);
}
}

contrib/retep/uk/org/retep/dtu/DNode.java (new file)
@@ -0,0 +1,233 @@
package uk.org.retep.dtu;
import uk.org.retep.util.Logger;
import uk.org.retep.xml.core.XMLFactory;
import uk.org.retep.xml.core.XMLFactoryException;
import java.io.IOException;
import java.io.Serializable;
import java.util.Iterator;
/**
* This is the base class for all nodes.
*/
public class DNode implements DElement, Serializable
{
// The id of this node
protected int id;
// The type of this node
protected int type;
protected int x,y;
public static final int OK = 0; // Node last ran fine
public static final int ERROR = 1; // Node failed on last run
/**
* This type of node does nothing
*/
public static int NOP = 0; // No action
public DNode()
{
this(NOP);
}
public DNode(int aType)
{
id=-1;
type=aType;
// Init the transform linkage
mf=mt=5;
nf=nt=0;
fn = new DTransform[mf];
tn = new DTransform[mt];
Logger.log(Logger.DEBUG,"new DNode");
}
public int getID()
{
return id;
}
public void setID(int aID)
{
id=aID;
Logger.log(Logger.DEBUG,"DNode.setID",aID);
}
public int getType()
{
return type;
}
public void setType(int aType)
{
type=aType;
Logger.log(Logger.DEBUG,"DNode.setType",aType);
}
/**
*/
public void saveXML(XMLFactory aFactory)
throws IOException, XMLFactoryException
{
Logger.log(Logger.DEBUG,"DNode.saveXML start",this);
Iterator it;
aFactory.startTag(DConstants.XML_NODE);
aFactory.addAttribute(DConstants.XML_ID,new Integer(getID()));
aFactory.addAttribute(DConstants.XML_TYPE,new Integer(getType()));
// used for display only
aFactory.addAttribute(DConstants.XML_X,new Integer(getX()));
aFactory.addAttribute(DConstants.XML_Y,new Integer(getY()));
// Save the transforms here (only the from list required)
for(int i=0;i<nf;i++) {
fn[i].saveXML(aFactory);
}
aFactory.endTag(); // NODE
Logger.log(Logger.DEBUG,"DNode.saveXML finish",this);
}
public void setPosition(int aX,int aY)
{
x=aX;
y=aY;
}
public int getX()
{
return x;
}
public int getY()
{
return y;
}
public void setX(int aX)
{
x=aX;
}
public void setY(int aY)
{
y=aY;
}
/**
* This must be overridden to do something
* @return Return status
*/
public int run(DEnvironment env)
{
return OK;
}
/**
* Node Transforms...
*/
protected int nf,mf,nt,mt;
protected DTransform fn[],tn[];
/**
* Executes the transform
*/
public DTransform getTransform(int aID)
{
return tn[aID];
}
/**
* @return number of transforms
*/
public int getFromTransforms()
{
return nf;
}
/**
* @return number of transforms
*/
public int getToTransforms()
{
return nt;
}
/**
* Adds a transform to this node (called by DTransform)
*/
protected synchronized void setFrom(DTransform aTransform)
{
for(int i=0;i<nf;i++) {
if(fn[i].equals(aTransform)) {
return;
}
}
if(nf>=mf) {
mf+=5;
DTransform nn[] = new DTransform[mf];
System.arraycopy(fn,0,nn,0,nf);
fn=nn;
}
fn[nf++]=aTransform;
}
/**
* Adds a transform to this node (called by DTransform)
*/
protected synchronized void setTo(DTransform aTransform)
{
for(int i=0;i<nt;i++) {
if(tn[i].equals(aTransform)) {
return;
}
}
if(nt>=mt) {
mt+=5;
DTransform nn[] = new DTransform[mt];
System.arraycopy(tn,0,nn,0,nt);
tn=nn;
}
tn[nt++]=aTransform;
}
/**
* Removes a transform (called by DTransform)
*/
protected synchronized void removeFrom(DTransform aTransform)
{
for(int i=0;i<nf;i++) {
if(fn[i].equals(aTransform)) {
for(int j=i+1;j<nf;j++,i++) {
fn[i]=fn[j];
}
nf--;
return;
}
}
}
/**
* Removes a transform (called by DTransform)
*/
protected synchronized void removeTo(DTransform aTransform)
{
for(int i=0;i<nt;i++) {
if(tn[i].equals(aTransform)) {
for(int j=i+1;j<nt;j++,i++) {
tn[i]=tn[j];
}
nt--;
return;
}
}
}
}

contrib/retep/uk/org/retep/dtu/DProcessor.java (new file)
@@ -0,0 +1,191 @@
package uk.org.retep.dtu;
import uk.org.retep.util.Logger;
import java.util.Iterator;
/**
* This class processes a Module. It's implemented as a Thread and there can
* be many threads running on a single module
*/
public class DProcessor
{
/**
* This starts a module
*/
public static DProcessor run(DModule aModule) {
// 3600000 is 1 hour in milliseconds
return run(aModule,3600000);
}
/**
* This starts a module
*/
public static DProcessor run(DModule aModule,long timeout) {
return new DProcessor(aModule,timeout);
}
protected DProcessor(DModule aModule,long timeout) {
ThreadGroup group = new ThreadGroup(aModule.getDisplayName()+" DProcessor");
// Setup the environment
DEnvironment env = new DEnvironment();
// loop for any nodes without a transform pointing _to_ it.
Iterator it = aModule.iterator();
while(it.hasNext()) {
DNode node = (DNode) it.next();
// Only start if we have no predecessor
if(node.getFromTransforms()==0) {
proc proc = new proc(group,aModule,node,env);
proc.start();
}
}
// Now wait until all the threads have finished
boolean running=true;
try {
int cnt=1; // must loop at least once!
while(cnt>0) {
int numThreads = group.activeCount();
Thread threads[] = new Thread[numThreads];
cnt = group.enumerate(threads,false);
//Logger.log(Logger.DEBUG,"Waiting on threads",cnt);
while(cnt>0) {
//Logger.log(Logger.DEBUG,"Waiting on thread",cnt);
threads[--cnt].join(timeout);
}
Logger.log(Logger.DEBUG,"All threads appear to have died, retesting");
}
} catch(InterruptedException ie) {
Logger.log(Logger.ERROR,"DProcessor, exception caught while waiting for threads to die",ie);
}
// finally close any open datasources
Logger.log(Logger.DEBUG,"DProcessor cleanup");
Logger.log(Logger.DEBUG,"DProcessor finished");
}
class proc implements Runnable
{
protected DModule module; // The module being run
protected DNode pc; // current Program Counter
protected DEnvironment env; // Shared environment
// Used when launching new threads only
protected DTransform trans; // If not null, a transform to run first
protected int status;
protected Thread thread;
/**
* Start processing from DNode aNode. This is called by DProcessor at
* initialisation only.
*/
protected proc(ThreadGroup aGroup,DModule aModule,DNode aNode,DEnvironment aEnv)
{
// aGroup will be null when forking...
if(aGroup==null) {
thread = new Thread(this);
} else {
thread = new Thread(aGroup,this);
}
module = aModule;
pc = aNode;
env = aEnv;
}
/**
* Start processing the DTransform aTransform from aNode (does not execute
* the node). This is called by this inner class itself when forking new
* threads.
*/
protected proc(DModule aModule,DNode aNode,DEnvironment aEnv,DTransform aTransform,int aStatus)
{
this(null,aModule,aNode,aEnv);
trans = aTransform;
status = aStatus;
}
/**
* Start this thread
*/
public void start()
{
thread.start();
}
public void run()
{
// Handle an initial transform first. It's used when a new Thread was created.
if(trans!=null) {
transform(trans,false,status);
trans=null;
}
while(pc!=null) {
//Logger.log(Logger.DEBUG,"running node ",pc.getID());
// Process the node
int status = pc.run(env);
//Logger.log(Logger.DEBUG," status ",status);
// Now the transforms. This thread continues with the first one that runs,
// but any others that will also run will do so in their own thread.
// If no transform runs (or there are none), then the thread will die.
int numTrans = pc.getToTransforms();
boolean fork=false;
for(int i=0;i<numTrans;i++) {
fork = transform(pc.getTransform(i),fork,status);
//Logger.log(Logger.DEBUG,"fork",fork?"true":"false");
}
//Logger.log(Logger.DEBUG,"fork",fork?"true":"false");
if(!fork) {
// No transforms ran, so we quit this thread
pc=null;
}
// This gives the other threads a chance to run
Thread.yield();
}
}
/**
* This executes a transform
* @param aTransform DTransform to execute
* @param fork true then a new process is triggered
* @param status The return status of the previous node
* @return true if the transform ran or a fork occurred.
*/
public boolean transform(DTransform aTransform,boolean fork,int status)
{
// Check to see if the transform will run (based on the calling node's
// return status)
if(!aTransform.willRun(status,env)) {
return false;
}
if(fork) {
// Create the new processor but this time we want a transform to run
proc proc = new proc(module,pc,env,aTransform,status);
return true;
}
if(aTransform.run(env)) {
pc=aTransform.getTo();
return true;
}
return false;
}
} // class proc
}

contrib/retep/uk/org/retep/dtu/DTransform.java (new file)
@@ -0,0 +1,133 @@
package uk.org.retep.dtu;
import uk.org.retep.xml.core.XMLFactory;
import uk.org.retep.xml.core.XMLFactoryException;
import java.io.IOException;
/**
* This manages the links between two nodes.
*/
public class DTransform
{
protected DNode from,to;
protected int type;
public DTransform()
{
this(null,null);
}
public DTransform(DNode aFrom,DNode aTo)
{
from=aFrom;
to=aTo;
}
public int getType()
{
return type;
}
public void setType(int aType)
{
type=aType;
}
public void setFrom(DNode aNode)
{
if(from!=null) {
from.removeTo(this);
}
from=aNode;
from.setTo(this);
}
public DNode getFrom()
{
return from;
}
public void setTo(DNode aNode)
{
if(to!=null) {
to.removeFrom(this);
}
to=aNode;
aNode.setFrom(this);
}
public DNode getTo()
{
return to;
}
/**
* This ensures the minimum tag/attributes are generated.
* To extend, extend saveCustomXML() which is called by this method
* appropriately.
*/
public final void saveXML(XMLFactory aFactory)
throws IOException, XMLFactoryException
{
// Bare minimum is the tag type, and the destination node's id
aFactory.startTag(DConstants.XML_TRANSFORM);
aFactory.addAttribute(DConstants.XML_TYPE,Integer.toString(getType()));
aFactory.addAttribute(DConstants.XML_TO,Integer.toString(to.getID()));
saveCustomXML(aFactory);
aFactory.endTag();
}
/**
* Custom transformations must override this method and write directly to the
* supplied XMLFactory. A tag is currently open when the method is called, but
* is closed immediately after this method exits.
*/
public void saveCustomXML(XMLFactory aFactory)
throws IOException, XMLFactoryException
{
// Default method does nothing...
}
/**
* Checks to see if we should run based on the calling node's status. Override
* this to add this sort of checking.
*
* @param status The return status of the calling node
* @param env DEnvironment we are using
* @return true if we will run.
*/
public boolean willRun(int status,DEnvironment env)
{
switch(getType())
{
// NOP is the generic link - always run
case DConstants.NOP:
return true;
// SUCCESS only runs if the previous node was OK
case DConstants.SUCCESS:
return status==DNode.OK;
case DConstants.ERROR:
return status==DNode.ERROR;
// Default - always run. Override the method if you need to change this
default:
return true;
}
}
/**
* Override this for a transform to do something.
* @param env DEnvironment we are using
* @return true if we actually ran. DProcessor will jump to the getTo() node if so.
*/
public boolean run(DEnvironment env)
{
return true;
}
}

contrib/retep/uk/org/retep/tools.properties (new file)
@@ -0,0 +1,8 @@
#Written by Retep PropertyEditor
#Sat Mar 03 16:29:44 GMT+00:00 2001
tool.hba=pg_hba.conf editor
tool.hba.class=uk.org.retep.util.hba.Editor
tool.proped.class=uk.org.retep.util.proped.PropertyEditor
tool.hba.type=Misc
tool.proped.type=Misc
tool.proped=Properties Editor
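For illustration only: the all-in-one application itself is not part of this
commit, but entries of the form above (tool.<id>, tool.<id>.class,
tool.<id>.type) could be read along these lines. The class name
ToolLookupSketch and the lookup logic are assumptions, not actual retep code:

import java.io.InputStream;
import java.util.Enumeration;
import java.util.Properties;

// Hypothetical sketch: list the tools declared in tools.properties.
public class ToolLookupSketch
{
    public static void main(String[] args) throws Exception
    {
        Properties props = new Properties();
        InputStream in = ToolLookupSketch.class
            .getResourceAsStream("/uk/org/retep/tools.properties");
        props.load(in);
        in.close();

        Enumeration e = props.propertyNames();
        while(e.hasMoreElements()) {
            String key = (String) e.nextElement();
            if(key.endsWith(".class")) {
                // tool.<id>.class names the class implementing the Tool
                // interface; tool.<id> holds the display name shown to the user.
                String id = key.substring(0, key.length() - ".class".length());
                System.out.println(props.getProperty(id) + " = " + props.getProperty(key));
            }
        }
    }
}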

contrib/retep/uk/org/retep/tools/Tool.java (new file)
@@ -0,0 +1,33 @@
package uk.org.retep.tools;
import javax.swing.JMenuBar;
/**
* Tools can implement this interface to provide the parent manager (the big
* application or the StandaloneApp class) enough details to display them.
*
* If a tool does not implement this interface, it gets basic treatment.
*
* @author
* @version 1.0
*/
public interface Tool
{
/**
* @return the JMenuBar for this tool, null if none.
*/
public JMenuBar getMenuBar();
/**
* @return the title string to go into the JFrame/JInternalFrame's title bar.
*/
public String getTitle();
/**
* Called by StandaloneApp to indicate this is within a StandaloneApp.
* You should assume you are not in standalone mode until this is called.
*/
public void setStandaloneMode(boolean aMode);
}

contrib/retep/uk/org/retep/util/ExceptionDialog.java (new file)
@@ -0,0 +1,141 @@
package uk.org.retep.util;
import java.awt.*;
import javax.swing.*;
import java.awt.event.*;
/**
* Display an Exception to the user
* @author
* @version 1.0
*/
public class ExceptionDialog extends JDialog
{
// This is used to store the parent frame.
// Classes like StandaloneApp set this so that the
// displayException() method can work without knowing/finding out
// the parent Frame/JFrame.
private static Frame globalFrame;
private static ExceptionDialog globalDialog;
JPanel panel1 = new JPanel();
BorderLayout borderLayout1 = new BorderLayout();
JTextArea message = new JTextArea();
JPanel jPanel1 = new JPanel();
JButton jButton1 = new JButton();
GridLayout gridLayout1 = new GridLayout();
JButton jButton2 = new JButton();
JLabel jLabel1 = new JLabel();
public ExceptionDialog(Frame frame, String title, boolean modal)
{
super(frame, title, modal);
try
{
jbInit();
pack();
}
catch(Exception ex)
{
ex.printStackTrace();
}
}
public ExceptionDialog()
{
this(null, "", false);
}
void jbInit() throws Exception
{
panel1.setLayout(borderLayout1);
message.setBorder(BorderFactory.createLoweredBevelBorder());
message.setText("jTextArea1");
message.setBackground(Color.lightGray);
message.setEditable(false);
jButton1.setText("Close");
jButton1.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
jButton1_actionPerformed(e);
}
});
jPanel1.setLayout(gridLayout1);
jButton2.setEnabled(false);
jButton2.setText("Stack Trace");
jLabel1.setEnabled(false);
getContentPane().add(panel1);
panel1.add(message, BorderLayout.CENTER);
this.getContentPane().add(jPanel1, BorderLayout.SOUTH);
jPanel1.add(jButton2, null);
jPanel1.add(jLabel1, null);
jPanel1.add(jButton1, null);
}
/**
* Sets the Frame used to display all dialog boxes.
*/
public static void setFrame(Frame aFrame)
{
globalFrame = aFrame;
}
/**
* Displays a dialog based on the exception
* @param ex Exception that was thrown
*/
public static void displayException(Exception ex)
{
displayException(ex,null);
}
/**
* Displays a dialog based on the exception
* @param ex Exception that was thrown
*/
public static void displayException(Exception ex,String msg)
{
String cname = ex.getClass().getName();
int i=cname.lastIndexOf(".");
displayException(cname.substring(i+1),ex,msg);
}
public static void displayException(String title,Exception ex)
{
displayException(title,ex,null);
}
public static void displayException(String title,Exception ex,String msg)
{
Logger.log(Logger.ERROR,title,ex.getMessage());
// Default to a stack trace if no frame set
if(globalFrame==null) {
ex.printStackTrace();
return;
}
if(globalDialog==null) {
globalDialog=new ExceptionDialog(globalFrame,title,true);
globalDialog.pack();
}
globalDialog.setTitle(title);
if(msg!=null) {
globalDialog.message.setText(msg);
globalDialog.message.append(":\n");
}
globalDialog.message.append(ex.getMessage());
globalDialog.pack();
globalDialog.setVisible(true);
}
void jButton1_actionPerformed(ActionEvent e)
{
setVisible(false);
}
}

contrib/retep/uk/org/retep/util/Globals.java (new file)
@@ -0,0 +1,170 @@
package uk.org.retep.util;
import uk.org.retep.util.Logger;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Properties;
/**
* This is a Singleton that stores global properties, command line arguments
* etc.
*
* All tools are guaranteed that this will exist.
*
* @author
* @version 1.0
*/
public class Globals
{
private static final Globals SINGLETON = new Globals();
private Hashtable global= new Hashtable();
private Properties props = new Properties();
private ArrayList args = new ArrayList();
private Globals()
{
}
public static Globals getInstance()
{
return SINGLETON;
}
/**
* Retrieves an object from the global pool
* @param aKey key of the object
* @return The object, null if not found
*/
public Object get(Object aKey)
{
return global.get(aKey);
}
/**
* Stores an object into the global pool
* @param aKey key of the object
* @param aObj the object to store
* @return aObj
*/
public Object put(Object aKey,Object aObj)
{
return global.put(aKey,aObj);
}
/**
* Returns a Properties object of all properties
*/
/*
public Properties getProperties()
{
return props;
}
*/
/**
* @param aProp a property supplied to the command line
* @return property or NULL if not present
*/
public String getProperty(String aProp)
{
return props.getProperty(aProp);
}
/**
* @param aProp a property supplied to the command line
* @param aDefault default to return if property was not supplied
* @return property value
*/
public String getProperty(String aProp,String aDefault)
{
return props.getProperty(aProp,aDefault);
}
/**
* @param aID ID of the argument, 0 ... getArgumentCount()-1
* @return argument
*/
public String getArgument(int aID)
{
return (String) args.get(aID);
}
/**
* Returns an array of String objects representing the arguments
*/
public String[] getArguments()
{
return (String[]) args.toArray(new String[args.size()]);
}
/**
* Returns an Iterator of the arguments
*/
public Iterator getArgumentIterator()
{
return args.iterator();
}
/**
* @return number of arguments
*/
public int getArgumentCount()
{
return args.size();
}
/**
* Parses the command line arguments
*/
public void parseArguments(String[] aArgs)
throws Exception
{
for(int i=0;i<aArgs.length;i++) {
String arg = aArgs[i];
if(arg.startsWith("--") || arg.startsWith("-")) {
if(arg.length()>1) {
// Split the option at the first '=' char if any
int s = arg.startsWith("--") ? 2 : 1 ; // -- or -
int e = arg.indexOf("=");
String key,val;
if(e>s) {
// Format: -key=value
key=arg.substring(s,e);
val=arg.substring(e+1);
} else if(e>-1 && e<=s) {
// Can't have a property without a key!
throw new Exception("Invalid option -=");
} else {
key=arg.substring(s);
val=""; // can't be null
}
if(key.equals("d")) {
// -d | --d is reserved to set the Logger level
int level=0;
if(!val.equals("")) {
level=Integer.parseInt(val);
}
Logger.setLevel(level);
} else {
// Add all other properties into the Properties object
props.put(key,val);
Logger.log(Logger.INFO,"Argument",key,val);
}
} else {
// Just a - on its own?
System.out.println("Unknown option: -");
}
} else {
// Add the argument to the array
args.add(arg);
}
}
}
}

contrib/retep/uk/org/retep/util/Logger.java (new file)
@@ -0,0 +1,150 @@
package uk.org.retep.util;
import java.io.CharArrayWriter;
import java.io.PrintWriter;
public class Logger
{
protected static int level;
protected static PrintWriter logger;
public static final int NONE = -1;
public static final int INFO = 0;
public static final int ERROR = 1;
public static final int DEBUG = 2;
public static final int ALL = 3;
static {
level = NONE;
logger = null;
};
private static final String levels[] = {
"INFO :",
"ERROR:",
"DEBUG:",
"ALL :"
};
public static void setLevel(int aLevel)
{
// In case we have not yet set a logger
if(logger==null) {
logger = new PrintWriter(System.out);
}
if(aLevel<NONE) {
aLevel=NONE;
} else if(aLevel>ALL) {
aLevel=ALL;
}
level=aLevel;
if(level>NONE) {
log(INFO,"Log level changed to",level,levels[level]);
}
}
public static void setLogger(PrintWriter pw)
{
if(logger!=null) {
try {
logger.flush();
logger.close();
} catch(Exception ex) {
logger=pw;
log(ERROR,"Exception while closing logger",ex);
}
}
logger=pw;
}
public static void log(String msg)
{
log(INFO,msg);
}
public static void log(int aLevel,String msg)
{
write(aLevel,msg,null);
}
public static void log(int aLevel,String msg,int arg1)
{
Object o[] = {new Integer(arg1)};
write(aLevel,msg,o);
}
public static void log(int aLevel,String msg,int arg1,Object arg2)
{
Object o[] = {new Integer(arg1),arg2};
write(aLevel,msg,o);
}
public static void log(int aLevel,String msg,double arg1)
{
Object o[] = {new Double(arg1)};
write(aLevel,msg,o);
}
public static void log(int aLevel,String msg,double arg1,Object arg2)
{
Object o[] = {new Double(arg1),arg2};
write(aLevel,msg,o);
}
public static void log(int aLevel,String msg,Object arg1)
{
Object o[] = {arg1};
write(aLevel,msg,o);
}
public static void log(int aLevel,String msg,Object arg1,Object arg2)
{
Object o[] = {arg1,arg2};
write(aLevel,msg,o);
}
public static void log(int aLevel,String msg,Object arg1,Object arg2,Object arg3)
{
Object o[] = {arg1,arg2,arg3};
write(aLevel,msg,o);
}
public static void log(int aLevel,String msg,Throwable t)
{
CharArrayWriter buffer = new CharArrayWriter();
PrintWriter printWriter = new PrintWriter(buffer);
t.printStackTrace(printWriter);
Object o[] = {buffer.toString()};
buffer.close();
write(aLevel,msg,o);
}
private static void write(int aLevel,String aMsg,Object args[])
{
// Can't be above ALL
if(aLevel>ALL) {
aLevel=ALL;
}
// Ignore if below or equal to NONE
if(aLevel<INFO || aLevel>level) {
return;
}
logger.print("Logger:");
logger.print(levels[aLevel]);
logger.print(aMsg);
if(args!=null) {
for(int a=0;a<args.length;a++) {
logger.print(":");
logger.print(args[a]);
}
}
logger.println();
logger.flush();
}
}

contrib/retep/uk/org/retep/util/Main.java (new file)
@@ -0,0 +1,42 @@
package uk.org.retep.util;
import uk.org.retep.util.StandaloneApp;
import javax.swing.JComponent;
import javax.swing.JLabel;
/**
* This is a template for your own Tools. Copy (do not extend) this class. Please
* refer to Implementation for details.
*
* All you need to do is implement the init() method.
*
* $Id: Main.java,v 1.1 2001/03/05 09:15:36 peter Exp $
*/
public class Main extends StandaloneApp
{
public Main(String[] args)
throws Exception
{
super(args);
}
public JComponent init()
throws Exception
{
// Create your tool here, then do things like load files based on the
// command line arguments. Then return that tool.
// NB: This just allows us to compile. Your implementation must return
// the Tool itself.
return new JLabel("Replace with your own tool!");
}
public static void main(String[] args)
throws Exception
{
Main main = new Main(args);
main.pack();
main.setVisible(true);
}
}

contrib/retep/uk/org/retep/util/StandaloneApp.java (new file)
@@ -0,0 +1,85 @@
package uk.org.retep.util;
import uk.org.retep.tools.Tool;
import uk.org.retep.util.Globals;
import uk.org.retep.util.ExceptionDialog;
import java.awt.*;
import javax.swing.*;
import java.awt.event.*;
/**
* This provides the basic services needed for enabling some of the tools to
* run in a stand-alone fashion.
*
* Note: Because it's designed for standalone use, if this window is closed,
* the JVM is terminated. Do not use for normal application use.
*
* $Id: StandaloneApp.java,v 1.2 2001/03/05 10:18:48 peter Exp $
*
* @author
* @version 1.0
*/
public abstract class StandaloneApp extends JFrame
{
public StandaloneApp(String[] aArgs)
throws Exception
{
super(); // Initialise JFrame
// Allow dialogs to work with us
ExceptionDialog.setFrame(this);
// Add a window listener
this.addWindowListener(new java.awt.event.WindowAdapter()
{
public void windowClosing(WindowEvent e)
{
System.exit(0);
}
});
// Parse the command line arguments
Globals.getInstance().parseArguments(aArgs);
// Now initialise this tool (init is overridden)
JComponent tool = null;
try {
tool = init();
} catch(Exception ex) {
ex.printStackTrace();
System.exit(1);
}
// Now add to this frame
this.getContentPane().add(tool, BorderLayout.CENTER);
// Finally call the Tool interface
if(tool instanceof Tool) {
Tool t = (Tool) tool;
// Notify the tool we are a standalone
t.setStandaloneMode(true);
// Fetch the title
setTitle(t.getTitle());
// and a MenuBar (if needed)
JMenuBar mb = t.getMenuBar();
if(mb!=null) {
setJMenuBar(t.getMenuBar());
}
} else {
// Ok, set a default title string
setTitle("RetepTools Standalone");
}
}
/**
* You must override this method with your initialiser.
*/
public abstract JComponent init() throws Exception;
}

contrib/retep/uk/org/retep/util/hba/Editor.java (new file)
@@ -0,0 +1,141 @@
package uk.org.retep.util.hba;
import uk.org.retep.tools.Tool;
import uk.org.retep.util.models.HBATableModel;
import java.awt.*;
import java.io.*;
import java.util.*;
import javax.swing.table.*;
import javax.swing.*;
/**
* pg_hba.conf editor (& repairer)
*
* @author
* @version 1.0
*/
public class Editor extends JPanel implements Tool
{
BorderLayout borderLayout1 = new BorderLayout();
HBATableModel model = new HBATableModel();
JPanel jPanel1 = new JPanel();
GridBagLayout gridBagLayout1 = new GridBagLayout();
JLabel jLabel1 = new JLabel();
JComboBox typeEntry = new JComboBox();
JLabel jLabel2 = new JLabel();
JLabel jLabel3 = new JLabel();
JLabel jLabel4 = new JLabel();
JTextField ipEntry = new JTextField();
JTextField maskEntry = new JTextField();
JComboBox authEntry = new JComboBox();
JTextField argEntry = new JTextField();
JLabel jLabel5 = new JLabel();
JPanel jPanel2 = new JPanel();
FlowLayout flowLayout1 = new FlowLayout();
JButton jButton1 = new JButton();
JButton jButton2 = new JButton();
JScrollPane jScrollPane1 = new JScrollPane();
JButton jButton3 = new JButton();
JTable jTable1 = new JTable();
public Editor()
{
try
{
jbInit();
}
catch(Exception ex)
{
ex.printStackTrace();
}
}
void jbInit() throws Exception
{
this.setLayout(borderLayout1);
jTable1.setPreferredSize(new Dimension(600, 300));
jTable1.setModel(model);
this.setPreferredSize(new Dimension(600, 300));
this.add(jScrollPane1, BorderLayout.CENTER);
jScrollPane1.getViewport().add(jTable1, null);
jPanel1.setLayout(gridBagLayout1);
jLabel1.setText("Type");
jLabel2.setText("IP Address");
jLabel3.setText("Mask");
jLabel4.setText("Authentication");
ipEntry.setText("jTextField1");
maskEntry.setText("jTextField2");
argEntry.setText("jTextField3");
jLabel5.setText("Argument");
jPanel2.setLayout(flowLayout1);
jButton1.setText("New entry");
jButton2.setText("Validate");
jButton3.setText("Devele");
this.add(jPanel1, BorderLayout.SOUTH);
jPanel1.add(jLabel1, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0
,GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(typeEntry, new GridBagConstraints(2, 0, 1, 1, 0.0, 0.0
,GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(jLabel2, new GridBagConstraints(1, 1, 1, 1, 0.0, 0.0
,GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(jLabel3, new GridBagConstraints(3, 1, 1, 1, 0.0, 0.0
,GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(jLabel4, new GridBagConstraints(1, 2, 1, 1, 0.0, 0.0
,GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(ipEntry, new GridBagConstraints(2, 1, 1, 1, 0.0, 0.0
,GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(maskEntry, new GridBagConstraints(4, 1, 1, 1, 0.0, 0.0
,GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(authEntry, new GridBagConstraints(2, 2, 1, 1, 0.0, 0.0
,GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(argEntry, new GridBagConstraints(4, 2, 1, 1, 0.0, 0.0
,GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(jLabel5, new GridBagConstraints(3, 2, 1, 1, 0.0, 0.0
,GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
jPanel1.add(jPanel2, new GridBagConstraints(0, 3, 5, 1, 0.0, 0.0
,GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
jPanel2.add(jButton3, null);
jPanel2.add(jButton1, null);
jPanel2.add(jButton2, null);
}
public void openFile(String aFilename)
throws IOException
{
FileInputStream fis = new FileInputStream(aFilename);
BufferedReader br = new BufferedReader(new InputStreamReader(fis));
ArrayList list = model.getArray();
String s = br.readLine();
while(s!=null) {
if(s.startsWith("#")) {
// ignore comments
} else {
Record rec = Record.parseLine(s);
if(rec!=null) {
rec.validate();
list.add(rec);
}
}
s=br.readLine();
}
model.fireTableDataChanged();
}
public JMenuBar getMenuBar()
{
return null;
}
public String getTitle()
{
return "pg_hba.conf Editor/Repair tool";
}
public void setStandaloneMode(boolean aMode)
{
}
}

contrib/retep/uk/org/retep/util/hba/Main.java (new file)
@@ -0,0 +1,47 @@
package uk.org.retep.util.hba;
import uk.org.retep.util.ExceptionDialog;
import uk.org.retep.util.Globals;
import uk.org.retep.util.Logger;
import uk.org.retep.util.StandaloneApp;
import java.io.IOException;
import javax.swing.JComponent;
import javax.swing.JPanel;
/**
* Standalone entry point for the Properties editor
*
* $Id: Main.java,v 1.1 2001/03/05 09:15:37 peter Exp $
*/
public class Main extends StandaloneApp
{
public Main(String[] args)
throws Exception
{
super(args);
}
public JComponent init()
throws Exception
{
Globals globals = Globals.getInstance();
Editor editor = new Editor();
if(globals.getArgumentCount()>0) {
editor.openFile(globals.getArgument(0));
}
return editor;
}
public static void main(String[] args)
throws Exception
{
Main main = new Main(args);
main.pack();
main.setVisible(true);
}
}

View File

@ -0,0 +1,238 @@
package uk.org.retep.util.hba;
import uk.org.retep.util.Logger;
import uk.org.retep.util.misc.IPAddress;
import uk.org.retep.util.misc.WStringTokenizer;
/**
* Used to store the entries of a pg_hba.conf file
* @author
* @version 1.0
*/
public class Record
{
int type;
String dbname;
IPAddress ip;
IPAddress mask;
int authType;
String authArg;
public static final int TYPE_LOCAL = 0;
public static final int TYPE_HOST = 1;
public static final int TYPE_HOSTSSL = 2;
public static final String types[] = {
"local","host","hostssl"
};
public static final int AUTH_TRUST = 0;
public static final int AUTH_PASSWORD = 1;
public static final int AUTH_CRYPT = 2;
public static final int AUTH_IDENT = 3;
public static final int AUTH_KRB4 = 4;
public static final int AUTH_KRB5 = 5;
public static final int AUTH_REJECT = 6;
public static final String auths[] = {
"trust","password","crypt",
"ident",
"krb4","krb5",
"reject"
};
private static final String spc = " ";
public Record()
{
}
public int getType()
{
return type;
}
public void setType(int aType)
{
type=aType;
}
public String getDatabase()
{
return dbname;
}
public void setDatabase(String aDB)
{
dbname=aDB;
}
public int getAuthType()
{
return authType;
}
public void setAuthType(int aType)
{
authType=aType;
}
public String getAuthArgs()
{
return authArg;
}
public void setAuthArgs(String aArg)
{
authArg=aArg;
}
public IPAddress getIP()
{
return ip;
}
public void setIP(String aArg)
{
setIP(new IPAddress(aArg));
}
public void setIP(IPAddress aArg)
{
ip=aArg;
}
public IPAddress getMask()
{
return mask;
}
public void setMask(String aArg)
{
setMask(new IPAddress(aArg));
}
public void setMask(IPAddress aArg)
{
mask=aArg;
}
public String toString()
{
StringBuffer buf = new StringBuffer();
write(buf);
return buf.toString();
}
public void write(StringBuffer buf)
{
buf.append(types[type]).append(spc);
buf.append(getDatabase()).append(spc);
if(type==TYPE_HOST || type==TYPE_HOSTSSL) {
buf.append(getIP()).append(spc);
buf.append(getMask()).append(spc);
}
buf.append(auths[authType]);
// Now the authArg
switch(authType)
{
// These have no authArgs
case AUTH_TRUST:
case AUTH_REJECT:
case AUTH_KRB4:
case AUTH_KRB5:
break;
// These must have an arg
case AUTH_IDENT:
buf.append(spc).append(getAuthArgs());
break;
// These may have an optional arg
case AUTH_PASSWORD:
case AUTH_CRYPT:
if(!(authArg==null || authArg.equals("")))
buf.append(spc).append(getAuthArgs());
break;
}
}
private static WStringTokenizer tok;
public static Record parseLine(String s)
{
Record res = new Record();
int type;
if(s==null || s.equals("") || s.startsWith("#"))
return null;
if(tok==null)
tok=new WStringTokenizer();
tok.setString(s);
type=WStringTokenizer.matchToken(types,tok.nextToken());
res.setType(type);
res.setDatabase(tok.nextToken());
if(type==TYPE_HOST || type==TYPE_HOSTSSL) {
res.setIP(new IPAddress(tok.nextToken()));
res.setMask(new IPAddress(tok.nextToken()));
}
res.setAuthType(WStringTokenizer.matchToken(auths,tok.nextToken()));
res.setAuthArgs(tok.nextToken());
return res;
}
public static final int VALID = 0;
public static final int INVALID_TYPE = 1;
public static final int INVALID_IPREQUIRED = 2;
/**
* Validates the record
*/
public int validate()
{
switch(type)
{
case TYPE_HOST:
case TYPE_HOSTSSL:
if(ip==null || ip.isInvalid()) {
Logger.log(Logger.INFO,"pg_hba.conf: IP missing or invalid - repairing");
setMask("127.0.0.1");
}
if(mask==null || mask.isInvalid() || !ip.validateMask(mask)) {
Logger.log(Logger.INFO,"pg_hba.conf: IP address without mask - repairing");
setMask(ip.getMask());
}
break;
case TYPE_LOCAL:
break;
default:
return INVALID_TYPE;
}
return VALID;
}
/*
# host all 192.168.54.1 255.255.255.255 reject
# host all 0.0.0.0 0.0.0.0 krb5
# host all 192.168.0.0 255.255.0.0 ident omicron
#
local all trust
host all 127.0.0.1 255.255.255.255 trust
*/
}
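// Illustrative usage sketch, not part of the original file: it parses one of the sample
// pg_hba.conf lines from the comment block above, repairs it if needed, and prints it
// back out using only the Record API defined in this file.
class RecordUsageSketch
{
public static void main(String[] args)
{
Record rec = Record.parseLine("host all 127.0.0.1 255.255.255.255 trust");
if(rec!=null) {
rec.validate(); // fills in a missing or invalid IP/mask
System.out.println(rec.toString()); // re-emits the line in pg_hba.conf format
}
}
}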

View File

@ -0,0 +1,125 @@
package uk.org.retep.util.misc;
import java.util.StringTokenizer;
/**
* Represent an IP address
* @author
* @version 1.0
*/
public class IPAddress
{
protected long address;
protected long b[] = new long[4];
protected boolean invalid=true;
public IPAddress()
{
}
public IPAddress(String s)
{
setAddress(s);
}
public synchronized void setAddress(String s)
{
if(s==null || s.equals("")) {
invalid=true;
return;
}
address=0;
StringTokenizer tok = new StringTokenizer(s,".");
int i=0;
while(i<4 && tok.hasMoreElements()) {
b[i++] = Long.parseLong(tok.nextToken());
}
while(i<4) {
b[i++]=0;
}
invalid=false;
refresh();
}
public void refresh()
{
if(invalid)
return;
address = (b[0]<<24) | (b[1]<<16) | (b[2]<<8) | (b[3]);
}
public boolean isInvalid()
{
refresh();
return invalid;
}
public String toString()
{
refresh();
if(invalid)
return "*INVALID*";
return Long.toString(b[0])+"."+Long.toString(b[1])+"."+Long.toString(b[2])+"."+Long.toString(b[3]);
}
public boolean equals(Object o)
{
if(o instanceof IPAddress) {
IPAddress ip = (IPAddress) o;
refresh();
ip.refresh();
if(ip.invalid != invalid)
return false;
return address==ip.address;
}
return false;
}
private static int gethoststart(long b)
{
if((b & 0x80)==0x00) return 1; // class A
if((b & 0xc0)==0x80) return 2; // class B
if((b & 0xe0)==0xc0) return 3; // class C
return 4; // class D
}
public boolean validateMask(IPAddress mask)
{
// If we're a network, check the host mask
int i=gethoststart(b[0]);
System.out.println("Host start "+i);
while(i<4 && b[i]==0) {
if(mask.b[i++]>0)
return false;
}
for(i=0;i<4;i++) {
if((b[i]&mask.b[i])!=b[i])
return false;
}
return true;
}
public IPAddress getMask()
{
IPAddress mask = new IPAddress();
int i=3;
while(i>-1 && b[i]==0) {
mask.b[i--]=0;
}
while(i>-1) {
mask.b[i--]=255;
}
mask.invalid=false;
mask.refresh();
return mask;
}
}
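// Illustrative usage sketch, not part of the original file: the address is hypothetical;
// getMask() derives a matching mask from the trailing zero octets and validateMask()
// checks that the address/mask pair is consistent.
class IPAddressUsageSketch
{
public static void main(String[] args)
{
IPAddress net = new IPAddress("192.168.0.0");
IPAddress mask = net.getMask(); // 255.255.0.0 for this address
System.out.println(net + "/" + mask + " ok=" + net.validateMask(mask));
}
}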

View File

@ -0,0 +1,157 @@
package uk.org.retep.util.misc;
import java.io.*;
import java.util.Date;
import java.util.Iterator;
import java.util.Properties;
import java.util.TreeMap;
/**
* Misc Properties utilities..
* @author
* @version 1.0
*/
public class PropertiesIO
{
public PropertiesIO()
{
}
/**
* Builds a TreeMap based on the given Properties object. This is useful
* because the keys will be in sorted order.
*/
public static TreeMap getTreeMap(Properties p)
{
TreeMap map = new TreeMap();
Iterator e = p.keySet().iterator();
while(e.hasNext()) {
Object k = e.next();
map.put(k,p.get(k));
}
return map;
}
/**
* Writes a Properties file to the writer. This is similar to Properties.save
* except you can pick the key/value separator
*/
public static synchronized void save(Properties p,OutputStream out,char sep,String header)
throws IOException
{
save(p,p.keySet().iterator(),out,sep,header);
}
/**
* Writes a Properties file to the writer. This is similar to Properties.save
* except you can pick the key/value separator and the keys are written
* in a sorted manner
*/
public static synchronized void saveSorted(Properties p,OutputStream out,char sep,String header)
throws IOException
{
save(p,getTreeMap(p).keySet().iterator(),out,sep,header);
}
/**
* This is the same as save, only the keys in the enumeration are written.
*/
public static synchronized void save(Properties p,Iterator e, OutputStream out,char sep,String header)
throws IOException
{
BufferedWriter w = new BufferedWriter(new OutputStreamWriter(out, "8859_1"));
if (header != null) {
w.write('#');
w.write(header);
w.newLine();
}
w.write('#');
w.write(new Date().toString());
w.newLine();
while(e.hasNext()) {
String key = (String)e.next();
w.write(encode(key,true));
w.write(sep);
w.write(encode((String)p.get(key),false));
w.newLine();
}
w.flush();
}
private static final String specialSaveChars = "=: \t\r\n\f#!";
/**
* Encodes a string in a way similar to the JDK's Properties method
*/
public static String encode(String s, boolean escapeSpace)
{
int l=s.length();
StringBuffer buf = new StringBuffer(l<<1);
for(int i=0;i<l;i++) {
char c = s.charAt(i);
switch(c)
{
case ' ':
if(i==0 || escapeSpace) {
buf.append('\\');
}
buf.append(' ');
break;
case '\\':
buf.append('\\').append('\\');
break;
case '\t':
buf.append('\\').append('t');
break;
case '\n':
buf.append('\\').append('n');
break;
case '\r':
buf.append('\\').append('r');
break;
case '\f':
buf.append('\\').append('f');
break;
default:
if((c<0x20)||(c>0x7e)) {
buf.append('\\').append('u');
buf.append(toHex((c >> 12) & 0xF));
buf.append(toHex((c >> 8) & 0xF));
buf.append(toHex((c >> 4) & 0xF));
buf.append(toHex( c & 0xF));
} else {
if (specialSaveChars.indexOf(c) != -1)
buf.append('\\');
buf.append(c);
}
}
}
return buf.toString();
}
/**
* Convert a nibble to a hex character
* @param n the nibble to convert.
*/
public static char toHex(int n) {
return hd[(n & 0xF)];
}
/** A table of hex digits */
private static final char[] hd = {
'0','1','2','3','4','5','6','7',
'8','9','A','B','C','D','E','F'
};
}
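// Illustrative usage sketch, not part of the original file: the keys and header are
// hypothetical; saveSorted() writes them to System.out with '=' as the separator and
// the keys in sorted order, which Properties.save does not offer.
class PropertiesIOUsageSketch
{
public static void main(String[] args) throws IOException
{
Properties p = new Properties();
p.setProperty("zeta","last");
p.setProperty("alpha","first");
PropertiesIO.saveSorted(p,System.out,'=',"example header");
}
}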

View File

@ -0,0 +1,102 @@
package uk.org.retep.util.misc;
/**
* Similar to StringTokenizer but handles white spaces and multiple delimiters
* between tokens. It also handles quotes
*
* @author
* @version 1.0
*/
public class WStringTokenizer
{
String string;
int pos,len;
/**
* Constructor
*/
public WStringTokenizer()
{
}
/**
* Constructor: set the initial string
* @param aString String to tokenise
*/
public WStringTokenizer(String aString)
{
setString(aString);
}
/**
* @param aString String to tokenise
*/
public void setString(String aString)
{
string=aString;
pos=0;
len=string.length();
}
/**
* @return true if more tokens may be possible
*/
public boolean hasMoreTokens()
{
return !(string==null || pos==len);
}
/**
* @return next token, null if complete.
*/
public String nextToken()
{
char c;
boolean q=false;
if(!hasMoreTokens())
return null;
// find start of token
while(pos<len) {
c = string.charAt(pos);
if(c=='\'' || c=='\"')
q=!q;
if(q || c==' '||c=='\t')
pos++;
else
break;
}
// find last char of token
int p=pos;
while(pos<len) {
c = string.charAt(pos);
if(c=='\'' || c=='\"')
q=!q;
if(!q && (c==' '||c=='\t') )
break;
else
pos++;
}
return string.substring(p,pos);
}
/**
* Compare a string against an array of strings and return the index
* @param t array to compare against (all lowercase)
* @param s string to test
* @return index in t of s, -1 if not present
*/
public static int matchToken(String[] t,String s)
{
s=s.toLowerCase();
for(int i=0;i<t.length;i++)
if(t[i].equals(s))
return i;
return -1;
}
}
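// Illustrative usage sketch, not part of the original file: the input line is hypothetical
// and mixes spaces and a tab, which WStringTokenizer treats as a single delimiter run.
// Record.parseLine() earlier in this commit tokenises pg_hba.conf lines the same way.
class WStringTokenizerUsageSketch
{
public static void main(String[] args)
{
WStringTokenizer tok = new WStringTokenizer("local   all\ttrust");
while(tok.hasMoreTokens()) {
System.out.println(tok.nextToken()); // prints "local", "all", "trust"
}
}
}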

View File

@ -0,0 +1,91 @@
package uk.org.retep.util.models;
import uk.org.retep.util.hba.Record;
import java.util.ArrayList;
import java.util.Iterator;
import javax.swing.table.*;
/**
* A TableModel to display the contents of a pg_hba.conf file
* @author
* @version 1.0
*/
public class HBATableModel extends AbstractTableModel
{
ArrayList list = new ArrayList();
private static final String cols[] = {
"Type","Database","IP Address","IP Mask","Authentication","Arguments"
};
public HBATableModel()
{
}
public ArrayList getArray()
{
return list;
}
public int getColumnCount()
{
return cols.length;
}
public Object getValueAt(int aRow, int aCol)
{
Record rec = (Record) list.get(aRow);
int t;
switch(aCol)
{
case 0:
t = rec.getType();
return t<0 ? "ERR" : Record.types[t] ;
case 1:
return rec.getDatabase();
case 2:
return rec.getIP();
case 3:
return rec.getMask();
case 4:
t=rec.getAuthType();
return t<0 ? "ERR" : Record.auths[t] ;
case 5:
return rec.getAuthArgs();
default:
return "";
}
}
public int getRowCount()
{
return list.size();
}
public boolean isCellEditable(int rowIndex, int columnIndex)
{
/**@todo: Override this javax.swing.table.AbstractTableModel method*/
return super.isCellEditable( rowIndex, columnIndex);
}
public String getColumnName(int aColumn)
{
return cols[aColumn];
}
public void setValueAt(Object aValue, int rowIndex, int columnIndex)
{
/**@todo: Override this javax.swing.table.AbstractTableModel method*/
super.setValueAt( aValue, rowIndex, columnIndex);
}
}

View File

@ -0,0 +1,176 @@
package uk.org.retep.util.models;
import uk.org.retep.util.Logger;
import uk.org.retep.util.misc.PropertiesIO;
import java.io.*;
import java.util.*;
import javax.swing.table.*;
import java.text.*;
/**
* A TableModel that shows a view of a PropertyFile object
*
* $Id: PropertiesTableModel.java,v 1.1 2001/03/05 09:15:37 peter Exp $
*
* @author
* @version 1.0
*/
public class PropertiesTableModel extends AbstractTableModel
{
// The properties
protected TreeMap properties;
protected Properties origProperties;
protected Object keys[];
public PropertiesTableModel()
{
this(new Properties());
}
public PropertiesTableModel(Properties aProperties)
{
setProperties(aProperties);
}
public synchronized int getKeyRow(Object k)
{
for(int i=0;i<keys.length;i++) {
if(keys[i].equals(k)) {
return i;
}
}
return -1;
}
/**
* Avoid updating the returned Properties directly; use the put method in this class instead.
*/
public Properties getProperties()
{
return origProperties;
}
public synchronized void put(Object k,Object v)
{
properties.put(k,v);
origProperties.put(k,v);
refresh();
}
public Object get(Object k)
{
return origProperties.get(k);
}
public synchronized void remove(Object k)
{
properties.remove(k);
origProperties.remove(k);
refresh();
}
public boolean contains(Object o)
{
return origProperties.contains(o);
}
public boolean containsKey(Object o)
{
return origProperties.containsKey(o);
}
public boolean containsValue(Object o)
{
return origProperties.containsValue(o);
}
public void setProperties(Properties aProperties)
{
origProperties=aProperties;
properties = PropertiesIO.getTreeMap(aProperties);
refresh();
}
public void refresh()
{
keys = properties.keySet().toArray();
fireTableDataChanged();
}
private static final String cols[] = {
"Property","Value"
};
public int getColumnCount()
{
return cols.length;
}
public Object getValueAt(int aRow, int aColumn)
{
if(aRow<0 || aRow>=keys.length || aColumn<0 || aColumn>=cols.length)
return null;
Object key = keys[aRow];
switch(aColumn)
{
case 0:
return key;
case 1:
return properties.get(key);
default:
return null;
}
}
public int getRowCount()
{
return keys.length;
}
public String getColumnName(int aColumn)
{
return cols[aColumn];
}
public void setValueAt(Object aValue, int aRow, int aColumn)
{
if(aRow<0 || aRow>=keys.length || aColumn<0 || aColumn>=cols.length)
return;
switch(aColumn)
{
// Rename the key (only if not already present). If already present
// the refresh() will replace with the old one anyhow...
case 0:
if(!properties.containsKey(aValue)) {
Object oldValue = get(keys[aRow]);
remove(keys[aRow]);
put(aValue,oldValue);
}
refresh();
break;
// Update the value...
case 1:
put(keys[aRow],aValue);
//refresh();
break;
default:
// Should never be called
Logger.log(Logger.ERROR,"PropertiesTableModel: Column range",aColumn);
}
}
public boolean isCellEditable(int aRow, int aColumn)
{
return true;
}
}

View File

@ -0,0 +1,53 @@
package uk.org.retep.util.proped;
import uk.org.retep.util.ExceptionDialog;
import uk.org.retep.util.Globals;
import uk.org.retep.util.Logger;
import uk.org.retep.util.StandaloneApp;
import java.io.IOException;
import java.util.Iterator;
import javax.swing.JComponent;
/**
* Standalone entry point for the Properties editor
*
* $Id: Main.java,v 1.1 2001/03/05 09:15:38 peter Exp $
*/
public class Main extends StandaloneApp
{
public Main(String[] args)
throws Exception
{
super(args);
}
public JComponent init()
throws Exception
{
Globals globals = Globals.getInstance();
PropertyEditor panel = new PropertyEditor();
// Only handle 1 open at a time in standalone mode
if(globals.getArgumentCount()>0) {
try {
panel.openFile(globals.getArgument(0));
} catch(IOException ioe) {
ExceptionDialog.displayException(ioe,"while loading "+globals.getArgument(0));
throw (Exception) ioe.fillInStackTrace();
}
}
return panel;
}
public static void main(String[] args)
throws Exception
{
Main main = new Main(args);
main.pack();
main.setVisible(true);
}
}

View File

@ -0,0 +1,381 @@
package uk.org.retep.util.proped;
import uk.org.retep.util.ExceptionDialog;
import uk.org.retep.util.misc.PropertiesIO;
import uk.org.retep.util.models.PropertiesTableModel;
import java.awt.*;
import java.io.*;
import java.util.*;
import javax.swing.*;
import java.awt.event.*;
/**
* A property file editor
*
* $Id: PropertyEditor.java,v 1.1 2001/03/05 09:15:38 peter Exp $
*
* @author
* @version 1.0
*/
public class PropertyEditor
extends JPanel
implements uk.org.retep.tools.Tool
{
BorderLayout borderLayout1 = new BorderLayout();
// The filename, null if not set
String filename;
File file;
JScrollPane jScrollPane1 = new JScrollPane();
JTable contentTable = new JTable();
PropertiesTableModel model = new PropertiesTableModel();
boolean standaloneMode;
private static final String TITLE_PREFIX = "Retep PropertyEditor";
JPopupMenu popupMenu = new JPopupMenu();
JMenuItem newPopupItem = new JMenuItem();
JMenuItem dupPopupItem = new JMenuItem();
JMenuItem delPopupItem = new JMenuItem();
JMenuBar menuBar = new JMenuBar();
JMenu jMenu1 = new JMenu();
JMenuItem jMenuItem4 = new JMenuItem();
JMenuItem jMenuItem5 = new JMenuItem();
JMenuItem jMenuItem6 = new JMenuItem();
JMenuItem jMenuItem7 = new JMenuItem();
JMenuItem jMenuItem8 = new JMenuItem();
JMenuItem closeMenuItem = new JMenuItem();
public PropertyEditor()
{
try
{
jbInit();
}
catch(Exception ex)
{
ex.printStackTrace();
}
}
/**
* @return the default menubar
*/
public JMenuBar getMenuBar()
{
return menuBar;
}
/**
* @return the File menu
*/
public JMenu getMenu()
{
return jMenu1;
}
/**
* @return the recommended title string for the parent JFrame/JInternalFrame
*/
public String getTitle()
{
if(filename==null) {
return TITLE_PREFIX;
}
return TITLE_PREFIX+": "+filename;
}
/**
* Sets menus up to Standalone mode
*/
public void setStandaloneMode(boolean aMode)
{
standaloneMode=aMode;
if(aMode) {
closeMenuItem.setText("Exit");
} else {
closeMenuItem.setText("Close");
}
}
public boolean isStandalone()
{
return standaloneMode;
}
public void openFile(String aFile)
throws IOException
{
openFile(new File(aFile));
}
public void openFile(File aFile)
throws IOException
{
FileInputStream fis = new FileInputStream(aFile);
Properties p = new Properties();
p.load(fis);
fis.close();
model.setProperties(p);
file=aFile;
filename = aFile.getAbsolutePath();
}
public void saveFile(File aFile)
throws IOException
{
FileOutputStream fis = new FileOutputStream(aFile);
PropertiesIO.save(model.getProperties(),fis,'=',"Written by "+TITLE_PREFIX);
fis.close();
filename = aFile.getAbsolutePath();
file = aFile;
}
void jbInit() throws Exception
{
this.setLayout(borderLayout1);
contentTable.setToolTipText("");
contentTable.setAutoResizeMode(JTable.AUTO_RESIZE_LAST_COLUMN);
contentTable.setModel(model);
contentTable.addMouseListener(new java.awt.event.MouseAdapter()
{
public void mouseClicked(MouseEvent e)
{
contentTable_mouseClicked(e);
}
public void mouseReleased(MouseEvent e)
{
contentTable_mouseReleased(e);
}
});
newPopupItem.setText("New");
newPopupItem.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
newPopupItem_actionPerformed(e);
}
});
dupPopupItem.setText("Duplicate");
dupPopupItem.setAccelerator(javax.swing.KeyStroke.getKeyStroke(67, java.awt.event.KeyEvent.CTRL_MASK, false));
dupPopupItem.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
dupPopupItem_actionPerformed(e);
}
});
delPopupItem.setText("Delete");
delPopupItem.setAccelerator(javax.swing.KeyStroke.getKeyStroke(68, java.awt.event.KeyEvent.CTRL_MASK, false));
delPopupItem.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
delPopupItem_actionPerformed(e);
}
});
jMenu1.setText("File");
jMenuItem4.setText("Open");
jMenuItem4.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
jMenuItem4_actionPerformed(e);
}
});
jMenuItem5.setText("Save");
jMenuItem5.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
jMenuItem5_actionPerformed(e);
}
});
jMenuItem6.setText("Save As");
jMenuItem6.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
jMenuItem6_actionPerformed(e);
}
});
jMenuItem7.setText("Revert");
jMenuItem7.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
jMenuItem7_actionPerformed(e);
}
});
jMenuItem8.setText("Print");
closeMenuItem.setText("Close");
closeMenuItem.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
closeMenuItem_actionPerformed(e);
}
});
jMenu2.setText("Edit");
jMenuItem1.setText("New");
jMenuItem1.setAccelerator(javax.swing.KeyStroke.getKeyStroke(78, java.awt.event.KeyEvent.CTRL_MASK, false));
jMenuItem1.addActionListener(new java.awt.event.ActionListener()
{
public void actionPerformed(ActionEvent e)
{
newPopupItem_actionPerformed(e);
}
});
jMenuItem2.setText("Duplicate");
jMenuItem3.setText("Delete");
this.add(jScrollPane1, BorderLayout.CENTER);
jScrollPane1.getViewport().add(contentTable, null);
popupMenu.add(newPopupItem);
popupMenu.add(dupPopupItem);
popupMenu.add(delPopupItem);
menuBar.add(jMenu1);
menuBar.add(jMenu2);
jMenu1.add(jMenuItem4);
jMenu1.add(jMenuItem5);
jMenu1.add(jMenuItem6);
jMenu1.add(jMenuItem7);
jMenu1.addSeparator();
jMenu1.add(jMenuItem8);
jMenu1.addSeparator();
jMenu1.add(closeMenuItem);
jMenu2.add(jMenuItem1);
jMenu2.add(jMenuItem2);
jMenu2.add(jMenuItem3);
}
Point popupPoint = new Point();
JMenu jMenu2 = new JMenu();
JMenuItem jMenuItem1 = new JMenuItem();
JMenuItem jMenuItem2 = new JMenuItem();
JMenuItem jMenuItem3 = new JMenuItem();
void contentTable_mouseClicked(MouseEvent e)
{
if(e.isPopupTrigger()) {
popupPoint.setLocation(e.getX(),e.getY());
popupMenu.show(contentTable,e.getX(),e.getY());
}
}
void contentTable_mouseReleased(MouseEvent e)
{
contentTable_mouseClicked(e);
}
void jMenuItem4_actionPerformed(ActionEvent e)
{
JFileChooser fc = new JFileChooser();
if(fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) {
try {
openFile(fc.getSelectedFile());
} catch(IOException ioe) {
ExceptionDialog.displayException(ioe);
}
}
}
void closeMenuItem_actionPerformed(ActionEvent e)
{
if(standaloneMode) {
System.exit(0);
} else {
filename="";
file=null;
model.setProperties(new Properties());
}
}
void newPopupItem_actionPerformed(ActionEvent e)
{
int y = contentTable.rowAtPoint(popupPoint);
// create a new unique key based on the current one
String key=(String) model.getValueAt(y,0);
if(key==null) {
key="new-key";
}
int uid=1;
while(model.containsKey(key+uid)) {
uid++;
}
key=key+uid;
model.put(key,"");
contentTable.clearSelection();
}
void dupPopupItem_actionPerformed(ActionEvent e)
{
int y = contentTable.rowAtPoint(popupPoint);
// create a new unique key based on the current one
String key=(String) model.getValueAt(y,0);
Object val=model.get(key);
int uid=1;
while(model.containsKey(key+uid)) {
uid++;
}
key=key+uid;
model.put(key,val);
contentTable.clearSelection();
}
void delPopupItem_actionPerformed(ActionEvent e)
{
int y = contentTable.rowAtPoint(popupPoint);
model.remove(model.getValueAt(y,0));
}
void jMenuItem6_actionPerformed(ActionEvent e)
{
JFileChooser fc = new JFileChooser();
if(fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION) {
try {
saveFile(fc.getSelectedFile());
} catch(IOException ioe) {
ExceptionDialog.displayException(ioe);
}
}
}
void jMenuItem5_actionPerformed(ActionEvent e)
{
if(filename==null) {
jMenuItem6_actionPerformed(e);
} else {
try {
saveFile(file);
} catch(IOException ioe) {
ExceptionDialog.displayException(ioe);
}
}
}
void jMenuItem7_actionPerformed(ActionEvent e)
{
// add check here
if(file!=null) {
try {
openFile(file);
} catch(IOException ioe) {
ExceptionDialog.displayException(ioe);
}
} else {
jMenuItem4_actionPerformed(e);
}
}
}

View File

@ -0,0 +1,334 @@
package uk.org.retep.xml.core;
import java.io.IOException;
import java.io.Writer;
/**
* An XMLFactory is used to render XML Tags, accounting for nesting etc
*/
public class XMLFactory
{
/**
* The nest level (ie, how many tags down the tree we are)
*/
protected int level;
/**
* The size of our tag name cache
*/
protected int maxlevel;
/**
* Our tag name cache
*/
protected String[] names;
/**
* Used to keep track of how formatting is done
*/
protected boolean hascontent;
protected boolean[] contbuf;
/**
* Scratch used by nest()
*/
private char[] nestbuf;
/**
* The destination Writer
*/
protected Writer out;
/**
* True if we are still within a tag
*/
protected boolean inTag;
/**
* True if we have just created a tag so parameters are valid
*/
protected boolean inArg;
/**
* Constructs an XMLFactory with no output Writer
*/
public XMLFactory()
{
this(10);
}
/**
* Constructs an XMLFactory with no output Writer
* @param m Expected number of leaves in the XML Tree
*/
public XMLFactory(int m)
{
// Initialise the names cache
level=0;
maxlevel=m;
names=new String[maxlevel];
contbuf=new boolean[maxlevel];
// This is used by nest()
nestbuf=new char[maxlevel];
for(int i=0;i<maxlevel;i++)
nestbuf[i]=' ';
}
/**
* Constructs an XMLFactory
* @param out Writer to send the output to
*/
public XMLFactory(Writer out)
throws IOException
{
this();
setWriter(out);
}
/**
* Constructs an XMLFactory
* @param out Writer to send the output to
* @param encoding The XML encoding
*/
public XMLFactory(Writer out,String encoding)
throws IOException
{
this();
setWriter(out,encoding);
}
/**
* Constructs an XMLFactory
* @param out Writer to send the output to
* @param m Expected number of leaves in the XML Tree
*/
public XMLFactory(int m,Writer out)
throws IOException
{
this(m);
setWriter(out);
}
/**
* Constructs an XMLFactory
* @param out Writer to send the output to
* @param encoding The XML encoding
* @param m Expected number of leaves in the XML Tree
*/
public XMLFactory(int m,Writer out,String encoding)
throws IOException
{
this(m);
setWriter(out,encoding);
}
/**
* Sets the Writer to send the output to. This call will also send the
* XML header.
*
* @param out Writer to send output to
*/
public void setWriter(Writer out)
throws IOException
{
setWriter(out,"ISO-8859-1");
}
/**
* Sets the Writer to send the output to. This call will also send the
* XML header using the supplied encoding. It is up to the calling code to
* ensure the Writer actually uses this encoding.
*
* @param out Writer to send output to
* @param encoding Encoding of the XML Output
*/
public void setWriter(Writer out,String encoding)
throws IOException
{
this.out=out;
out.write("<?xml version=\"1.0\" encoding=\"");
out.write(encoding);
out.write("\" ?>\n");
}
/**
* @return Writer the XML is being sent out on.
*/
public Writer getWriter() {
return out;
}
/**
* This starts a tag
* @param name The tag name
*/
public void startTag(String name)
throws IOException
{
if(inTag && inArg) {
// Handles two startTag() calls in succession.
out.write(">");
}
nest(level);
out.write('<');
out.write(name);
inTag=true;
inArg=true;
// cache the current tag name
names[level]=name;
// cache the current hascontent value & reset
contbuf[level]=hascontent;
hascontent=false;
// increase the level and the cache's as necessary
level++;
if(level>=maxlevel) {
maxlevel=maxlevel+10;
String n[]=new String[maxlevel];
System.arraycopy(names,0,n,0,level);
names=n;
boolean b[] = new boolean[maxlevel];
System.arraycopy(contbuf,0,b,0,level);
contbuf=b;
}
}
/**
* This ends a tag
*/
public void endTag()
throws IOException, XMLFactoryException
{
if(level<1)
throw new XMLFactoryException("endTag called above root node");
level--;
if(inArg) {
// We are still within the opening tag
out.write(" />");
} else {
// We must have written some content or child tags
// hascontent is true if addContent() was called. If it was never called
// to get here some child tags must have been written, so we call nest()
// so that the close tag is on its own line, and everything looks neat
// and tidy.
if(!hascontent)
nest(level);
out.write("</");
out.write(names[level]);
out.write('>');
}
inArg=false; // The parent tag must be told it now has content
inTag= level>0; // Are we still in a tag?
hascontent=contbuf[level]; // retrieve this level's hascontent value
}
/**
* This completes the document releasing any open resources.
*/
public void close()
throws IOException, XMLFactoryException
{
while(level>0)
endTag();
out.write('\n');
out.flush();
}
/**
* This writes an attribute to the current tag. If the value is null, then no action is taken.
* @param name Name of the parameter
* @param value Value of the parameter
* @throw XMLFactoryException if out of context
*/
public void addAttribute(String name,Object value)
throws IOException, XMLFactoryException
{
if(value==null)
return;
if(inArg) {
out.write(' ');
out.write(name);
out.write("=\"");
out.write(encode(value.toString()));
out.write("\"");
} else
throw new XMLFactoryException("Cannot add attribute outside of a tag");
}
/**
* This writes some content to the current tag. Once this has been called,
* you cannot add any more attributes to the current tag. Note, if c is null,
* no action is taken.
* @param c content to add.
*/
public void addContent(Object c)
throws IOException, XMLFactoryException
{
if(c==null)
return;
if(inTag) {
if(inArg) {
// close the open tag
out.write('>');
inArg=false;
}
out.write(c.toString());
// This is used by endTag()
hascontent=true;
} else
throw new XMLFactoryException("Cannot add content outside of a tag");
}
/**
* This adds a comment to the XML file. This is normally used at the start of
* any XML output.
* @param c Comment to include
*/
public void addComment(Object c)
throws IOException, XMLFactoryException
{
if(inTag)
throw new XMLFactoryException("Cannot add comments within a tag");
out.write("\n<!-- ");
out.write(c.toString());
out.write(" -->");
}
/**
* Indents the output according to the level
* @param level The indent level to generate
*/
protected void nest(int level)
throws IOException
{
out.write('\n');
while(level>nestbuf.length) {
out.write(nestbuf,0,nestbuf.length);
level-=nestbuf.length;
}
out.write(nestbuf,0,level);
}
/**
* Encodes the string so that any XML tag chars are translated. At present this is
* a pass-through; subclasses may override it to escape characters such as &, < and >.
*/
protected String encode(String s) {
return s;
}
}
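// Illustrative usage sketch, not part of the original file: the tag and attribute names
// are hypothetical; the factory handles nesting and indentation, and close() shuts any
// tags still open before flushing.
class XMLFactorySketch
{
public static void main(String[] args) throws IOException, XMLFactoryException
{
XMLFactory f = new XMLFactory(new java.io.OutputStreamWriter(System.out));
f.startTag("database");
f.addAttribute("name","demo"); // attributes must be added before any content
f.startTag("table");
f.addContent("example");
f.endTag();
f.endTag();
f.close();
}
}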

View File

@ -0,0 +1,19 @@
package uk.org.retep.xml.core;
/**
* Title:
* Description:
* Copyright: Copyright (c) 2001
* Company:
* @author
* @version 1.0
*/
public class XMLFactoryException extends Exception
{
public XMLFactoryException(String s)
{
super(s);
}
}

View File

@ -0,0 +1,237 @@
package uk.org.retep.xml.jdbc;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import uk.org.retep.xml.core.XMLFactory;
import uk.org.retep.xml.core.XMLFactoryException;
public class XMLDatabase
{
/**
* The XMLFactory being used by this instance
*/
protected XMLFactory factory;
/**
* Constructor. setXMLFactory() must be called if this method is used.
*/
public XMLDatabase()
{
}
/**
* Constructor
* @param fac XMLFactory to use
*/
public XMLDatabase(XMLFactory fac)
{
this();
setXMLFactory(fac);
}
/**
* Sets the factory to use.
* @param factory XMLFactory to use
*/
public void setXMLFactory(XMLFactory factory)
{
this.factory=factory;
}
/**
* @return the XMLFactory being used.
*/
public XMLFactory getXMLFactory()
{
return factory;
}
/**
* Flushes all output to the Writer.
* @throw IOException from Writer
* @throw XMLFactoryException from XMLFactory
*/
public void close()
throws IOException, XMLFactoryException
{
factory.close();
}
/**
* writes the schema of a table.
* @param con Connection to database
* @param table Table name
* @throw IOException from Writer
* @throw SQLException from JDBC
* @throw XMLFactoryException from XMLFactory
*/
public void writeTable(Connection con,String table)
throws IOException,SQLException,XMLFactoryException
{
writeTable(con.getMetaData(),table);
}
/**
* writes the schema of a table.
* @param db DatabaseMetaData for the database
* @param table Table name
* @throw IOException from Writer
* @throw SQLException from JDBC
* @throw XMLFactoryException from XMLFactory
*/
public void writeTable(DatabaseMetaData db,String table)
throws IOException,SQLException,XMLFactoryException
{
writeTable(db,null,null,table);
}
/**
* writes the schema of a table.
* @param db DatabaseMetaData for the database
* @param table Table name
* @throw IOException from Writer
* @throw SQLException from JDBC
* @throw XMLFactoryException from XMLFactory
*/
public void writeTable(DatabaseMetaData db,String cat,String schem,String table)
throws IOException,SQLException,XMLFactoryException
{
ResultSet trs;
factory.startTag("TABLE");
factory.addAttribute("NAME",table);
// fetch the remarks for this table (if any)
trs = db.getTables(null,null,table,null);
if(trs!=null) {
if(trs.next()) {
String rem = trs.getString(5);
if(rem!=null)
factory.addContent(rem);
}
trs.close();
}
trs = db.getColumns(null,null,table,"%");
if(trs!=null) {
while(trs.next()) {
factory.startTag("COLUMN");
factory.addAttribute("NAME",trs.getString(4));
factory.addAttribute("TYPE",trs.getString(6));
factory.addAttribute("COLUMN_SIZE",trs.getString(7));
factory.addAttribute("DECIMAL_DIGITS",trs.getString(9));
factory.addAttribute("NUM_PREC_RADIX",trs.getString(10));
factory.addAttribute("NULLABLE",trs.getString(11));
factory.addAttribute("COLUMN_DEF",trs.getString(13));
factory.addAttribute("CHAR_OCTET_LENGTH",trs.getString(16));
factory.addAttribute("ORDINAL_POSITION",trs.getString(17));
factory.addAttribute("IS_NULLABLE",trs.getString(18));
factory.addAttribute("TABLE_CAT",trs.getString(1));
factory.addAttribute("TABLE_SCHEM",trs.getString(2));
String rem = trs.getString(12);
if(rem!=null)
factory.addContent(rem);
factory.endTag();
}
trs.close();
}
factory.endTag();
}
/**
* This generates the schema of an entire database.
* @param db Connection to database
* @param table Table pattern
* @throw IOException from Writer
* @throw SQLException from JDBC
* @throw XMLFactoryException from XMLFactory
* @see java.sql.DatabaseMetaData.getTables()
*/
public void writeDatabase(Connection db,String table)
throws IOException, SQLException, XMLFactoryException
{
writeDatabase(db.getMetaData(),null,null,table);
}
/**
* This generates the schema of an entire database.
* @param db DatabaseMetaData of database
* @param table Table pattern
* @throw IOException from Writer
* @throw SQLException from JDBC
* @throw XMLFactoryException from XMLFactory
* @see java.sql.DatabaseMetaData.getTables()
*/
public void writeDatabase(DatabaseMetaData db,String table)
throws IOException, SQLException, XMLFactoryException
{
writeDatabase(db,null,null,table);
}
/**
* This generates the schema of an entire database.
* @param db DatabaseMetaData of database
* @param cat Catalog (may be null)
* @param schem Schema (may be null)
* @param table Table pattern
* @throw IOException from Writer
* @throw SQLException from JDBC
* @throw XMLFactoryException from XMLFactory
* @see java.sql.DatabaseMetaData.getTables()
*/
public void writeDatabase(Connection db)
throws IOException, SQLException, XMLFactoryException
{
writeDatabase(db.getMetaData(),null,null,"%");
}
/**
* This generates the schema of an entire database.
* @param db DatabaseMetaData of database
* @param cat Catalog (may be null)
* @param schem Schema (may be null)
* @param table Table pattern
* @throw IOException from Writer
* @throw SQLException from JDBC
* @throw XMLFactoryException from XMLFactory
* @see java.sql.DatabaseMetaData.getTables()
*/
public void writeDatabase(DatabaseMetaData db)
throws IOException, SQLException, XMLFactoryException
{
writeDatabase(db,null,null,"%");
}
/**
* This generates the schema of an entire database.
* @param db DatabaseMetaData of database
* @param cat Catalog (may be null)
* @param schem Schema (may be null)
* @param table Table pattern
* @throw IOException from Writer
* @throw SQLException from JDBC
* @throw XMLFactoryException from XMLFactory
* @see java.sql.DatabaseMetaData.getTables()
*/
public void writeDatabase(DatabaseMetaData db,String cat,String schem,String table)
throws IOException, SQLException, XMLFactoryException
{
ResultSet rs = db.getTables(cat,schem,table,null);
if(rs!=null) {
factory.startTag("DATABASE");
factory.addAttribute("PRODUCT",db.getDatabaseProductName());
factory.addAttribute("VERSION",db.getDatabaseProductVersion());
while(rs.next()) {
writeTable(db,rs.getString(1),rs.getString(2),rs.getString(3));
}
factory.endTag();
rs.close();
}
}
}
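// Illustrative usage sketch, not part of the original file: the JDBC URL and output file
// name are hypothetical and assume a suitable driver has already been loaded;
// writeDatabase() walks every table and emits its schema as XML via the factory.
class XMLDatabaseSketch
{
public static void main(String[] args) throws Exception
{
Connection con = java.sql.DriverManager.getConnection("jdbc:postgresql:template1");
XMLDatabase xdb = new XMLDatabase(new XMLFactory(new java.io.FileWriter("schema.xml")));
xdb.writeDatabase(con);
xdb.close();
con.close();
}
}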

View File

@ -0,0 +1,505 @@
package uk.org.retep.xml.jdbc;
import java.io.IOException;
import java.io.Writer;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Properties;
import uk.org.retep.xml.core.XMLFactory;
import uk.org.retep.xml.core.XMLFactoryException;
/**
* This class takes a java.sql.ResultSet object and generates an XML stream
* based on its contents.
*
* $Id: XMLResultSet.java,v 1.1 2001/01/23 10:22:20 peter Exp $
*/
public class XMLResultSet
{
/**
* The current ResultSet to process
*/
protected ResultSet rs;
/**
* The XMLFactory being used by this instance
*/
protected XMLFactory factory;
/**
* The default properties used when none are supplied by the user
*/
protected static Properties defaults;
/**
* The default property name for defining the tag name used to define a
* ResultSet
*/
public static String RESULTSET_NAME = "resultset.name";
/**
* The default tag name for a resultset
*/
public static String DEFAULT_RESULTSET_NAME = "RESULTSET";
/**
* The default property name for defining the tag name used to define a row
*/
public static String ROW_NAME = "row.name";
/**
* The default tag name for a row
*/
public static String DEFAULT_ROW_NAME = "RECORD";
/**
* The property name suffix used to define the attribute name for a column
*/
public static String COLNAME = ".name";
/**
* The value of the property (named as its related column) used to define
* how the column is generated. This indicates that the column's data is
* enclosed within a pair of tags, ie: &lt;id&gt;1234&lt;/id&gt;
*/
public static String CONTENT = "content";
/**
* The value of the property (named as its related column) used to define
* how the column is generated. This indicates that the column's data is
* an attribute in the column's tag, ie: <id value="1234" />
*/
public static String ATTRIBUTE = "attribute";
/**
* This is the default attribute name used when the ATTRIBUTE option is set.
*/
public static String DEFAULT_ATTRIBUTE = "VALUE";
/**
* The value of the property (named as its related column) used to define
* how the column is generated. This indicates that the column's data is
* an attribute in the parent's tag. ie: <row id="1234" />
*/
public static String ROW_ATTRIBUTE = "row";
/**
* This property name marks the begining row number within the ResultSet to
* start processing.
*/
public static String FIRST_ROW = "row.first";
/**
* This property name marks the last row number within the ResultSet to
* end processing.
*/
public static String LAST_ROW = "row.last";
/**
* Constructor
*/
public XMLResultSet()
{
factory = new XMLFactory();
}
/**
* Constructor
*/
public XMLResultSet(ResultSet rs)
{
this();
setResultSet(rs);
}
/**
* Sets the ResultSet to use
* @param rs ResultSet
*/
public void setResultSet(ResultSet rs)
{
this.rs=rs;
}
/**
* @return the current ResultSet
*
*/
public ResultSet getResultSet()
{
return rs;
}
/**
* Sets the Writer to send all output to
* @param out Writer
* @throws IOException from XMLFactory
* @see XMLFactory.setWriter
*/
public void setWriter(Writer out)
throws IOException
{
factory.setWriter(out);
}
/**
* @return Writer output is going to
*/
public Writer getWriter()
{
return factory.getWriter();
}
/**
* @return XMLFactory being used
*/
public XMLFactory getXMLFactory()
{
return factory;
}
/**
* Flushes all output to the Writer
* @throw IOException from Writer
* @throw XMLFactoryException from XMLFactory
*/
public void close()
throws IOException, XMLFactoryException
{
factory.close();
}
/**
* Returns the default properties used by translate() and buildDTD()
* @return Properties default property settings
*/
public static Properties getDefaultProperties()
{
if(defaults==null) {
defaults=new Properties();
defaults.setProperty(RESULTSET_NAME,DEFAULT_RESULTSET_NAME);
defaults.setProperty(ROW_NAME,DEFAULT_ROW_NAME);
}
return defaults;
}
/**
* This generates an XML version of a ResultSet sending it to the supplied
* Writer.
* @param rs ResultSet to convert
* @param p Properties for the conversion
* @param out Writer to send output to (replaces existing one)
* @throws XMLFactoryException from XMLFactory
* @throws IOException from Writer
* @throws SQLException from ResultSet
*/
public void translate(ResultSet rs,Properties p,Writer out)
throws XMLFactoryException, IOException, SQLException
{
factory.setWriter(out);
translate(rs,p);
}
/**
* This generates an XML version of a ResultSet sending it to the supplied
* Writer using a default tag structure.
* @param rs ResultSet to convert
* @param out Writer to send output to (replaces existing one)
* @throws XMLFactoryException from XMLFactory
* @throws IOException from Writer
* @throws SQLException from ResultSet
*/
public void translate(ResultSet rs,Writer out)
throws XMLFactoryException, IOException, SQLException
{
factory.setWriter(out);
translate(rs,(Properties)null);
}
/**
* This generates an XML version of a ResultSet sending it to the current
* output stream using a default tag structure.
* @param rs ResultSet to convert
* @throws XMLFactoryException from XMLFactory
* @throws IOException from Writer
* @throws SQLException from ResultSet
*/
public void translate(ResultSet rs)
throws XMLFactoryException, IOException, SQLException
{
translate(rs,(Properties)null);
}
/**
* This generates an XML version of a ResultSet sending it to the current
* output stream.
* @param rs ResultSet to convert
* @param p Properties for the conversion
* @throws XMLFactoryException from XMLFactory
* @throws IOException from Writer
* @throws SQLException from ResultSet
*/
public void translate(ResultSet rs,Properties p)
throws XMLFactoryException, IOException, SQLException
{
// if we don't pass any properties, create an empty one and cache it if
// further calls do the same
if(p==null) {
p=getDefaultProperties();
}
// Fetch some common values
String setName = p.getProperty(RESULTSET_NAME,DEFAULT_RESULTSET_NAME);
String rowName = p.getProperty(ROW_NAME,DEFAULT_ROW_NAME);
ResultSetMetaData rsmd = rs.getMetaData();
int numcols = rsmd.getColumnCount();
String colname[] = new String[numcols]; // field name cache
int coltype[] = new int[numcols]; // true to use attribute false content
String colattr[] = new String[numcols]; // Attribute name
// These deal with when an attribute is to go into the row's tag parameters
int parentFields[] = getRowAttributes(numcols,colname,colattr,coltype,rsmd,p); // used to cache the id's
int numParents= parentFields==null ? 0 : parentFields.length; // number of parent fields
boolean haveParent= numParents>0; // true only if we need to use these
// This allows some limiting of the output result
int firstRow = Integer.parseInt(p.getProperty(FIRST_ROW,"0"));
int lastRow = Integer.parseInt(p.getProperty(LAST_ROW,"0"));
int curRow=0;
// Start the result set's tag
factory.startTag(setName);
while(rs.next()) {
if(firstRow<=curRow && (lastRow==0 || curRow<lastRow)) {
factory.startTag(rowName);
if(haveParent) {
// Add any ROW_ATTRIBUTE entries
for(int i=0;i<numParents;i++)
factory.addAttribute(colname[parentFields[i]],rs.getString(parentFields[i]+1));
}
// Process any CONTENT & ATTRIBUTE entries.
// This skips if all the entries are ROW_ATTRIBUTE's
if(numParents < numcols) {
for(int i=1;i<=numcols;i++) {
// Now do we write the value as an argument or as PCDATA?
switch(coltype[i-1]) {
case 1:
factory.startTag(colname[i-1]);
factory.addAttribute(colattr[i-1],rs.getString(i));
factory.endTag();
break;
case 0:
factory.startTag(colname[i-1]);
factory.addContent(rs.getString(i));
factory.endTag();
break;
default:
// Unknown type. This should only be called for ROW_ATTRIBUTE which
// is handled before this loop.
break;
}
}
}
// End the row
factory.endTag();
}
curRow++;
} // check for firstRow <= curRow <= lastRow
// Close the result set's tag
factory.endTag();
}
/**
* This method takes a ResultSet and writes its DTD to the current writer
* @param rs ResultSet
*/
public void buildDTD(ResultSet rs)
throws IOException, SQLException
{
buildDTD(rs,null,getWriter());
}
/**
* This method takes a ResultSet and writes its DTD to the supplied writer
* @param rs ResultSet
* @param out Writer to send output to
*/
public void buildDTD(ResultSet rs,Writer out)
throws IOException, SQLException
{
buildDTD(rs,null,out);
}
/**
* This method takes a ResultSet and writes its DTD to the current writer
* @param rs ResultSet
* @param p Properties defining tag types (as translate)
*/
public void buildDTD(ResultSet rs,Properties p)
throws IOException, SQLException
{
buildDTD(rs,p,getWriter());
}
/**
* This method takes a ResultSet and writes its DTD to the supplied writer.
*
* <p>ToDo:<ol>
* <li>Add ability to have NULLABLE columns appear as optional (ie instead of
* x, have x? (DTD for Optional). Can't use + or * as that indicates more than
* 1 instance).
* </ol>
*
* @param rs ResultSet
* @param p Properties defining tag types (as translate)
* @param out Writer to send output to
*/
public void buildDTD(ResultSet rs,Properties p,Writer out)
throws IOException, SQLException
{
// if we don't pass any properties, create an empty one and cache it if
// further calls do the same
if(p==null) {
p=getDefaultProperties();
}
// Fetch some common values
String setName = p.getProperty(RESULTSET_NAME,DEFAULT_RESULTSET_NAME);
String rowName = p.getProperty(ROW_NAME,DEFAULT_ROW_NAME);
ResultSetMetaData rsmd = rs.getMetaData();
int numcols = rsmd.getColumnCount();
String colname[] = new String[numcols]; // field name cache
int coltype[] = new int[numcols]; // true to use attribute false content
String colattr[] = new String[numcols]; // Attribute name
// These deal with when an attribute is to go into the row's tag parameters
int parentFields[] = getRowAttributes(numcols,colname,colattr,coltype,rsmd,p); // used to cache the id's
int numParents= parentFields==null ? 0 : parentFields.length; // number of parent fields
boolean haveParent= numParents>0; // true only if we need to use these
// Now the dtd defining the ResultSet
out.write("<!ELEMENT ");
out.write(setName);
out.write(" (");
out.write(rowName);
out.write("*)>\n");
// Now the dtd defining each row
out.write("<!ELEMENT ");
out.write(rowName);
out.write(" (");
boolean s=false;
for(int i=0;i<numcols;i++) {
if(coltype[i]!=2) { // not ROW_ATTRIBUTE
if(s)
out.write(",");
out.write(colname[i]);
s=true;
}
}
out.write(")>\n");
// Now handle any ROW_ATTRIBUTE's
if(haveParent) {
out.write("<!ATTLIST ");
out.write(rowName);
for(int i=0;i<numParents;i++) {
out.write("\n ");
out.write(colname[parentFields[i]]);
out.write(" CDATA #IMPLIED");
}
out.write("\n>\n");
}
// Now add any CONTENT & ATTRIBUTE fields
for(int i=0;i<numcols;i++) {
if(coltype[i]!=2) {
out.write("<!ELEMENT ");
out.write(colname[i]);
// CONTENT
if(coltype[i]==0) {
out.write(" (#PCDATA)");
} else {
out.write(" EMPTY");
}
out.write(">\n");
// ATTRIBUTE
if(coltype[i]==1) {
out.write("<!ATTLIST ");
out.write(colname[i]);
out.write("\n ");
out.write(colattr[i]);
out.write(" CDATA #IMPLIED\n>\n");
}
}
}
}
/**
* Private method used by the core translate and buildDTD methods.
* @param numcols Number of columns in ResultSet
* @param colname Array of column names
* @param colattr Array of column attribute names
* @param coltype Array of column types
* @param rsmd ResultSetMetaData for ResultSet
* @param p Properties being used
* @return array containing field numbers which should appear as attributes
* within the rows tag.
* @throws SQLException from JDBC
*/
private int[] getRowAttributes(int numcols,
String colname[],String colattr[],
int coltype[],
ResultSetMetaData rsmd,Properties p)
throws SQLException
{
int pf[] = null;
int nf = 0;
// Now we put a columns value as an attribute if the property
// fieldname=attribute (ie myname=attribute)
// and if the fieldname.name property exists, use it as the attribute name
for(int i=0;i<numcols;i++) {
colname[i] = rsmd.getColumnName(i+1);
colattr[i] = p.getProperty(colname[i]+COLNAME,DEFAULT_ATTRIBUTE);
if(p.getProperty(colname[i],CONTENT).equals(ROW_ATTRIBUTE)) {
// Ok, ROW_ATTRIBUTE's need to be cached, so add them in here
coltype[i]=2;
if(pf==null) {
pf = new int[numcols]; // Max possible number of entries
}
pf[nf++] = i;
} else {
// Normal CONTENT or ATTRIBUTE entry
coltype[i] = p.getProperty(colname[i],CONTENT).equals(ATTRIBUTE) ? 1 : 0;
}
}
// Form an array exactly nf elements long
if(nf>0) {
int r[] = new int[nf];
System.arraycopy(pf,0,r,0,nf);
return r;
}
// Return null if no tags are to appear as attributes to the row's tag
return null;
}
}
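// Illustrative usage sketch, not part of the original file: the connection URL, query and
// column name are hypothetical; marking a column as ROW_ATTRIBUTE moves its value into
// the row tag's attributes instead of a nested element.
class XMLResultSetSketch
{
public static void main(String[] args) throws Exception
{
java.sql.Connection con = java.sql.DriverManager.getConnection("jdbc:postgresql:template1");
ResultSet rs = con.createStatement().executeQuery("select datname,encoding from pg_database");
Properties p = (Properties) XMLResultSet.getDefaultProperties().clone();
p.setProperty("datname",XMLResultSet.ROW_ATTRIBUTE);
XMLResultSet xrs = new XMLResultSet();
xrs.translate(rs,p,new java.io.OutputStreamWriter(System.out));
xrs.close();
con.close();
}
}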

View File

@ -0,0 +1,201 @@
package uk.org.retep.xml.parser;
import java.io.CharArrayWriter;
import java.io.IOException;
import java.util.List;
import java.util.Iterator;
import java.util.HashSet;
import java.util.ArrayList;
import java.util.HashMap;
import org.xml.sax.AttributeList;
import org.xml.sax.HandlerBase;
import org.xml.sax.InputSource;
import org.xml.sax.Parser;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
/**
* This class implements the base of the XML handler. You create an instance,
* register classes (who implement TagListener) that are interested in the tags
* and pass it to SAX.
*
* <p>Or you create an instance, register the TagListeners and use the getParser()
* method to create a Parser. Then start parsing by calling it's parse() method.
*/
public class TagHandler extends HandlerBase {
/**
* The current active level
*/
private int level;
/**
* cache used to handle nesting of tags
*/
private List contents;
/**
* cache used to handle nesting of tags
*/
private List tags;
/**
* cache used to handle nesting of tags
*/
private List args;
// Current active content writer
private CharArrayWriter content;
// List of TagListener's who want to be fed data
private HashSet tagListeners;
/**
* default constructor
*/
public TagHandler() {
level=0;
contents = new ArrayList();
tags = new ArrayList();
args = new ArrayList();
tagListeners = new HashSet();
}
/**
* Called by SAX when a tag is begun. This simply creates a new level in the
* cache and stores the parameters and tag name in there.
*/
public void startElement(String p0, AttributeList p1) throws SAXException {
// Now move up and fetch a CharArrayWriter from the cache
// creating if this is the first time at this level
if(contents.size()<=level) {
contents.add(new CharArrayWriter());
tags.add(p0);
args.add(new HashMap());
}
content=(CharArrayWriter) contents.get(level);
content.reset();
// Also cache the tag's text and argument list
tags.set(level,p0);
HashMap h = (HashMap) args.get(level);
h.clear();
for(int i=p1.getLength()-1;i>-1;i--) {
h.put(p1.getName(i),p1.getValue(i));
}
// Now notify any TagListeners
Iterator it = tagListeners.iterator();
while(it.hasNext())
( (TagListener) it.next() ).tagStart(level,p0,h);
// Now move up a level
level++;
}
/**
* This is called by SAX at the end of a tag. It notifies any TagListeners of the
* tag's content and then drops back a level, so that the previous parent tag may continue.
*/
public void endElement(String p0) throws SAXException {
// move up a level retrieving that level's current content
// Now this exception should never occur as the underlying parser should
// actually trap it.
if(level<1)
throw new SAXException("Already at top level?");
level--;
// Now notify any TagListeners
Iterator it = tagListeners.iterator();
while(it.hasNext())
( (TagListener) it.next() ).tagContent(content);
// allows large content to be released early
content.reset();
// Now reset content to the previous level
content=(CharArrayWriter) contents.get(level);
}
/**
* Called by SAX so that content between the start and end tags are captured.
*/
public void characters(char[] p0, int p1, int p2) throws SAXException {
content.write(p0,p1,p2);
}
/**
* Adds a TagListener so that it is notified of tags as they are processed.
* @param handler TagListener to add
*/
public void addTagListener(TagListener h) {
tagListeners.add(h);
}
/**
* Removes the TagListener so it no longer receives notifications of tags
*/
public void removeTagListener(TagListener h) {
tagListeners.remove(h);
}
/**
* This method returns a org.xml.sax.Parser object that will parse the
* contents of a URI.
*
* <p>Normally you would call this method, then call the parse(uri) method of
* the returned object.
* @return org.xml.sax.Parser object
*/
public Parser getParser()
throws SAXException
{
try {
SAXParserFactory spf = SAXParserFactory.newInstance();
String validation = System.getProperty ("javax.xml.parsers.validation", "false");
if (validation.equalsIgnoreCase("true"))
spf.setValidating (true);
SAXParser sp = spf.newSAXParser();
Parser parser = sp.getParser ();
parser.setDocumentHandler(this);
return(parser);
} catch(ParserConfigurationException pce) {
throw new SAXException(pce.toString());
}
}
/**
* This method will parse the specified URI.
*
* <p>Internally this is the same as getParser().parse(uri);
* @param uri The URI to parse
*/
public void parse(String uri)
throws IOException, SAXException
{
getParser().parse(uri);
}
/**
* This method will parse the specified InputSource.
*
* <p>Internally this is the same as getParser().parse(is);
* @param is The InputSource to parse
*/
public void parse(InputSource is)
throws IOException, SAXException
{
getParser().parse(is);
}
}
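// Illustrative usage sketch, not part of the original file: the file name is hypothetical;
// the anonymous TagListener simply prints every tag and its content as the document is
// parsed by the handler above.
class TagHandlerSketch
{
public static void main(String[] args) throws Exception
{
TagHandler handler = new TagHandler();
handler.addTagListener(new TagListener() {
public void tagStart(int level,String tag,HashMap args)
{
System.out.println(level+": <"+tag+"> "+args);
}
public void tagContent(CharArrayWriter content)
{
System.out.println("  content: "+content.toString());
}
});
handler.parse("data.xml");
}
}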

View File

@ -0,0 +1,30 @@
package uk.org.retep.xml.parser;
import java.util.HashMap;
import java.io.CharArrayWriter;
/**
* This interface defines the methods a class needs to implement if it wants the
* xml parser to notify it of any xml tags.
*/
public interface TagListener {
/**
* This is called when a tag has just been started.
* <p><b>NB:</b> args is volatile, so if you use it beyond the lifetime of
* this call, then you must make a copy of the HashMap (and not use simply
* store this HashMap).
* @param level The number of tags above this
* @param tag The tag name
* @param args A HashMap of any arguments
*/
public void tagStart(int level,String tag,HashMap args);
/**
* This method is called by the TagHandler once a tag has been fully
* processed.
* <p><b>NB:</b> content is volatile, so you must copy its contents if you use
* it beyond the lifetime of this call.
* @param content CharArrayWriter containing the content of the tag.
*/
public void tagContent(CharArrayWriter content);
}

View File

@ -0,0 +1,191 @@
package uk.org.retep.xml.test;
import java.lang.Exception;
import java.io.*;
import java.sql.*;
import java.util.Properties;
import uk.org.retep.xml.core.XMLFactoryException;
import uk.org.retep.xml.jdbc.XMLDatabase;
import uk.org.retep.xml.jdbc.XMLResultSet;
/**
* This "test" class is a fully functional tool in its own right. It utilises
* the xml classes to query and export to XML, or to dump database structures
* into XML.
*/
public class XMLExport
{
/**
* The current Database Connection
*/
protected Connection conn;
protected Statement stat;
protected String drvr,url,table;
protected XMLResultSet xrs;
protected XMLDatabase xdb;
protected Properties prop;
protected boolean outXML;
protected boolean outDTD;
protected boolean outTAB;
protected int maxRows=0;
public XMLExport(String[] args)
throws IOException,SQLException,XMLFactoryException,ClassNotFoundException
{
xrs = new XMLResultSet();
xrs.setWriter(new OutputStreamWriter(System.out));
//Properties p = new Properties(xrs.getDefaultProperties());
prop = (Properties) xrs.getDefaultProperties().clone();
xdb = new XMLDatabase(xrs.getXMLFactory());
for(int i=0;i<args.length;i++) {
String arg=args[i];
if(arg.startsWith("-D")) {
// Load JDBC Driver
drvr=arg.substring(2);
Class.forName(drvr);
System.out.println("Now using JDBC Driver: "+drvr);
} else if(arg.startsWith("-J")) {
// Open a JDBC Connection (closing the existing one, if any)
close();
url = arg.substring(2);
conn = DriverManager.getConnection(url);
System.out.println("Opened "+url);
stat=null;
} else if(arg.startsWith("-M")) {
// Set the maximum number of rows to process (0=no limit)
maxRows=Integer.parseInt(arg.substring(2));
if(maxRows<0)
maxRows=0;
prop.setProperty(XMLResultSet.FIRST_ROW,"0");
prop.setProperty(XMLResultSet.LAST_ROW,Integer.toString(maxRows));
} else if(arg.startsWith("-O")) {
// Set the output file for XML & DTD
xrs.setWriter(new FileWriter(arg.substring(2)));
System.out.println("XML/DTD Output now going to "+arg.substring(2));
} else if(arg.startsWith("-P")) {
// Set a parameter for XML & DTD
int p = arg.indexOf('=');
prop.setProperty(arg.substring(2,p),arg.substring(p+1));
} else if(arg.startsWith("-S")) {
// -Stable generate schema of just table
// -S generate schema of entire database
if(arg.length()>2) {
String table=arg.substring(2);
System.out.println("Generating XML Schema of table "+table);
xdb.writeTable(conn,table);
xdb.close();
} else {
System.out.println("Generating XML Schema of database");
xdb.writeDatabase(conn);
xdb.close();
}
} else if(arg.equals("-V")) {
// Select table output
outXML=outDTD=false;
} else if(arg.equals("-X")) {
// Select XML output
outXML=true;
outDTD=outTAB=false;
} else if(arg.equals("-Y")) {
// Select DTD output
outXML=outTAB=false;
outDTD=true;
} else if(arg.startsWith("-")) {
System.err.println("Unknown argument: "+arg);
System.exit(1);
} else {
// Ok, anything not starting with "-" are queries
if(stat==null)
stat=conn.createStatement();
System.out.println("Executing "+arg);
ResultSet rs = stat.executeQuery(arg);
if(rs!=null) {
if(outXML) {
xrs.translate(rs,prop);
xrs.close();
} else if(outDTD) {
// Output the DTD
xrs.buildDTD(rs,prop);
xrs.close();
} else {
// Normal resultset output
int rc=0;
ResultSetMetaData rsmd = rs.getMetaData();
int nc = rsmd.getColumnCount();
boolean us=false;
for(int c=0;c<nc;c++) {
if(us)
System.out.print("\t");
System.out.print(rsmd.getColumnName(c+1));
us=true;
}
System.out.println();
while(rs.next() && (maxRows==0 || rc<maxRows)) {
us=false;
for(int c=0;c<nc;c++) {
if(us)
System.out.print("\t");
System.out.print(rs.getString(c+1));
us=true;
}
System.out.println();
rc++;
}
System.out.println("Returned "+rc+" rows.");
}
rs.close();
}
}
}
close();
}
public void close() throws SQLException
{
if(conn!=null) {
conn.close();
System.out.println("Closed "+url);
conn=null;
stat=null;
}
}
public static void main(String[] args)
{
if(args.length==0) {
System.out.println("Usage: java uk.org.retep.xml.test.XMLExport [args ...]\nwhere args are:\n"+
"-Dclass.name JDBC Driver Class\n"+
"-Jurl JDBC URL\n"+
"-Mmax Maximum number of rows to process\n"+
"-Ofilename Send all XML or DTD output to file\n"+
"-Pkey=value Property passed on to XMLResultSet\n"+
"-S[table] Write XML description of table. Whole DB if table left out.\n"+
"-V Default: Write result to System.out\n"+
"-X Write result in XML to System.out\n"+
"-Y Write DTD describing result to System.out\n"+
"\nAny other argument not starting with - is treated as an SQL Query\n"+
"\nFor example:\n"+
"To dump the table structure of a database into db.xml, use\n $ java uk.org.retep.xml.test.XMLExport -Doracle.jdbc.driver.OracleDriver -Jjdbc:oracle:thin:dbname/username@localhost:1521:ISORCL -Odb.xml -S\n"+
"To dump the structure of a single table PRODUCT and write it into products.xml, use\n $ java uk.org.retep.xml.test.XMLExport -Doracle.jdbc.driver.OracleDriver -Jjdbc:oracle:thin:dbname/username@localhost:1521:ISORCL -Oproducts.xml -SPRODUCT\n"+
"To query a table and write the results into standard out as XML, use\n $ java uk.org.retep.xml.test.XMLExport -Doracle.jdbc.driver.OracleDriver -Jjdbc:oracle:thin:dbname/username@localhost:1521:ISORCL -M5 -PSKU=row -PIMAGE=attribute -X \"select sku,image,template from product\"\n"+
"To query a table and write a DTD describing the ResultSet, use\n $ java uk.org.retep.xml.test.XMLExport -Doracle.jdbc.driver.OracleDriver -Jjdbc:oracle:thin:dbname/username@localhost:1521:ISORCL -M5 -PSKU=row -PIMAGE=attribute -Y \"select sku,image,template from product\"\n"
);
System.exit(1);
}
try {
XMLExport XMLExport1 = new XMLExport(args);
} catch(Exception e) {
e.printStackTrace();
}
}
}
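
XMLExport is normally run from the command line as shown in the usage text above,
but since the constructor does all the work it can also be driven programmatically.
A minimal sketch follows; the driver class, JDBC URL, output file and query are
placeholders, not values taken from this distribution.

import uk.org.retep.xml.test.XMLExport;

public class ExportDemo
{
    public static void main(String[] args) throws Exception
    {
        // Placeholders: substitute your own JDBC driver, URL, file and query.
        String[] exportArgs = {
            "-Dorg.postgresql.Driver",
            "-Jjdbc:postgresql:template1?user=postgres",
            "-Oout.xml",
            "-X",
            "select tablename from pg_tables"
        };
        new XMLExport(exportArgs);  // parses the arguments and performs the export
    }
}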

118
contrib/xml/README Normal file
View File

@ -0,0 +1,118 @@
This package contains some simple routines for manipulating XML
documents stored in PostgreSQL. This is a work-in-progress and
somewhat basic at the moment (see the file TODO for some outline of
what remains to be done).
At present, two modules (based on different XML handling libraries)
are provided.
Prerequisite:
pgxml.c:
expat parser 1.95.0 or newer (http://expat.sourceforge.net)
or
pgxml_dom.c:
libxml2 (http://xmlsoft.org)
The libxml2 version provides more complete XPath functionality, and
seems like a good way to go. I've left the old versions in there for
comparison.
Compiling and loading:
----------------------
The Makefile only builds the libxml2 version.
To compile, just type make.
Then you can use psql to load the two function definitions:
\i pgxml_dom.sql
Function documentation and usage:
---------------------------------
pgxml_parse(text) returns bool
parses the provided text and returns true or false if it is
well-formed or not. It returns NULL if the parser couldn't be
created for any reason.
pgxml_xpath (XPath functions) - differs between the versions:
pgxml.c (expat version) has:
pgxml_xpath(text doc, text xpath, int n) returns text
parses doc and returns the cdata of the nth occurrence of
the "simple path" entry.
However, the remainder of this document will cover the pgxml_dom.c version.
pgxml_xpath(text doc, text xpath, text toptag, text septag) returns text
evaluates xpath on doc, and returns the result wrapped in
<toptag>...</toptag> and each result node wrapped in
<septag></septag>. toptag and septag may be empty strings, in which
case the respective tag will be omitted.
Example:
Given a table docstore:
Attribute | Type | Modifier
-----------+---------+----------
docid | integer |
document | text |
containing documents such as (these are archaeological site
descriptions, in case anyone is wondering):
<?xml version="1.0"?>
<site provider="Foundations" sitecode="ak97" version="1">
<name>Church Farm, Ashton Keynes</name>
<invtype>watching brief</invtype>
<location scheme="osgb">SU04209424</location>
</site>
one can type:
select docid,
pgxml_xpath(document,'//site/name/text()','','') as sitename,
pgxml_xpath(document,'//site/location/text()','','') as location
from docstore;
and get as output:
docid | sitename | location
-------+--------------------------------------+------------
1 | Church Farm, Ashton Keynes | SU04209424
2 | Glebe Farm, Long Itchington | SP41506500
3 | The Bungalow, Thames Lane, Cricklade | SU10229362
(3 rows)
or, to illustrate the use of the extra tags:
select docid as id,
pgxml_xpath(document,'//find/type/text()','set','findtype')
from docstore;
id | pgxml_xpath
----+-------------------------------------------------------------------------
1 | <set></set>
2 | <set><findtype>Urn</findtype></set>
3 | <set><findtype>Pottery</findtype><findtype>Animal bone</findtype></set>
(3 rows)
This produces a new, well-formed document. Note that document 1 had
no matching instances, so the set returned contains no
elements. Document 2 has 1 matching element and document 3 has 2.
This is just scratching the surface because XPath allows all sorts of
operations.
Note: I've only implemented the return of nodeset and string values so
far. This covers (I think) many types of queries, however.
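
Since the rest of this commit is JDBC-oriented, a hedged Java sketch of calling
pgxml_xpath through JDBC may be useful. It reuses the docstore table and XPath
expression from the example above; the driver class and connection URL are
placeholders, not part of this package.

import java.sql.*;

public class XPathQueryDemo
{
    public static void main(String[] args) throws Exception
    {
        Class.forName("org.postgresql.Driver");
        // Placeholder URL; docstore and the XPath follow the example above.
        Connection conn = DriverManager.getConnection(
            "jdbc:postgresql:mydb?user=postgres");
        Statement st = conn.createStatement();
        ResultSet rs = st.executeQuery(
            "select docid, pgxml_xpath(document,'//site/name/text()','','')" +
            " as sitename from docstore");
        while (rs.next())
            System.out.println(rs.getInt("docid") + " | " + rs.getString("sitename"));
        rs.close();
        st.close();
        conn.close();
    }
}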
John Gray <jgray@azuli.co.uk> 16 August 2001

7
contrib/xml/pgxml.source Normal file
View File

@ -0,0 +1,7 @@
--SQL for XML parser
CREATE FUNCTION pgxml_parse(text) RETURNS bool
AS '_OBJWD_/pgxml_DLSUFFIX_' LANGUAGE 'c' WITH (isStrict);
CREATE FUNCTION pgxml_xpath(text,text,text,text) RETURNS text
AS '_OBJWD_/pgxml_DLSUFFIX_' LANGUAGE 'c' WITH (isStrict);

View File

@ -0,0 +1,7 @@
--SQL for XML parser
CREATE FUNCTION pgxml_parse(text) RETURNS bool
AS '_OBJWD_/pgxml_dom_DLSUFFIX_' LANGUAGE 'c' WITH (isStrict);
CREATE FUNCTION pgxml_xpath(text,text,text,text) RETURNS text
AS '_OBJWD_/pgxml_dom_DLSUFFIX_' LANGUAGE 'c' WITH (isStrict);

152
doc/src/sgml/libpgeasy.sgml Normal file
View File

@ -0,0 +1,152 @@
<!--
$Header: /cvsroot/pgsql/doc/src/sgml/Attic/libpgeasy.sgml,v 2.9 2002/03/04 18:50:20 momjian Exp $
-->
<chapter id="pgeasy">
<title><application>libpgeasy</application> - Simplified C Library</title>
<note>
<title>Author</title>
<para>
Written by Bruce Momjian
(<email>pgman@candle.pha.pa.us</email>)
and last updated 2002-03-04
</para>
</note>
<para>
<application>pgeasy</application> allows you to cleanly interface
to the <application>libpq</application> library, more like a 4GL
SQL interface. Refer to <xref linkend="libpq"> for more
information about <application>libpq</application>.
</para>
<para>
It consists of a set of simplified C functions that encapsulate the
functionality of <application>libpq</application>. The functions are:
<itemizedlist>
<listitem>
<synopsis>
PGresult *doquery(char *query);
</synopsis>
</listitem>
<listitem>
<synopsis>
PGconn *connectdb(char *options);
</synopsis>
</listitem>
<listitem>
<synopsis>
void disconnectdb();
</synopsis>
</listitem>
<listitem>
<synopsis>
int fetch(void *param,...);
</synopsis>
</listitem>
<listitem>
<synopsis>
int fetchwithnulls(void *param,...);
</synopsis>
</listitem>
<listitem>
<synopsis>
void reset_fetch();
</synopsis>
</listitem>
<listitem>
<synopsis>
void on_error_continue();
</synopsis>
</listitem>
<listitem>
<synopsis>
void on_error_stop();
</synopsis>
</listitem>
<listitem>
<synopsis>
PGresult *get_result();
</synopsis>
</listitem>
<listitem>
<synopsis>
void set_result(PGresult *newres);
</synopsis>
</listitem>
</itemizedlist>
</para>
<para>
Many functions return a structure or value, so you can work
with the result if required.
</para>
<para>
You basically connect to the database with
<function>connectdb</function>, issue your query with
<function>doquery</function>, fetch the results with
<function>fetch</function>, and finish with
<function>disconnectdb</function>.
</para>
<para>
For <literal>SELECT</literal> queries, <function>fetch</function>
allows you to pass pointers as parameters, and on return the
variables are filled with data from the binary cursor you opened.
These binary cursors cannot be used if you are running the
<application>pgeasy</application> client on a system with a different
architecture than the database server. If you pass a NULL pointer
parameter, the column is skipped. <function>fetchwithnulls</function>
allows you to retrieve the NULL status of the field by passing an
<literal>int*</literal> after each result pointer, which returns true
or false to indicate if the field is null. You can always use
<application>libpq</application> functions on the
<structname>PGresult</structname> pointer returned by
<function>doquery</function>. <function>reset_fetch</function> starts
the fetch back at the beginning.
</para>
<para>
<function>get_result</function> and <function>set_result</function>
allow you to handle multiple open result sets. Use
<function>get_result</function> to save a result into an application
variable. You can then later use <function>set_result</function> to
return to the previously saved result.
</para>
<para>
There are several demonstration programs in
<filename>pgsql/src/interfaces/libpgeasy/examples</>.
</para>
</chapter>
<!-- Keep this comment at the end of the file
Local variables:
mode:sgml
sgml-omittag:nil
sgml-shorttag:t
sgml-minimize-attributes:nil
sgml-always-quote-attributes:t
sgml-indent-step:1
sgml-indent-data:t
sgml-parent-document:nil
sgml-default-dtd-file:"./reference.ced"
sgml-exposed-tags:nil
sgml-local-catalogs:("/usr/lib/sgml/catalog")
sgml-local-ecat-files:nil
End:
-->

805
doc/src/sgml/odbc.sgml Normal file
View File

@ -0,0 +1,805 @@
<!--
$Header: /cvsroot/pgsql/doc/src/sgml/Attic/odbc.sgml,v 1.30 2002/03/22 19:20:16 petere Exp $
-->
<chapter id="odbc">
<docinfo>
<authorgroup>
<author>
<firstname>Tim</firstname>
<surname>Goeke</surname>
</author>
<author>
<firstname>Thomas</firstname>
<surname>Lockhart</surname>
</author>
</authorgroup>
<date>1998-10-21</date>
</docinfo>
<title>ODBC Interface</title>
<indexterm zone="odbc">
<primary>ODBC</primary>
</indexterm>
<sect1 id="odbc-intro">
<title>Introduction</title>
<note>
<para>
Background information originally by Tim Goeke
(<email>tgoeke@xpressway.com</email>)
</para>
</note>
<para>
<acronym>ODBC</acronym> (Open Database Connectivity) is an abstract
<acronym>API</acronym>
that allows you to write applications that can interoperate
with various <acronym>RDBMS</acronym> servers.
<acronym>ODBC</acronym> provides a product-neutral interface
between frontend applications and database servers,
allowing a user or developer to write applications that are
portable between servers from different manufacturers.
</para>
<para>
The <acronym>ODBC</acronym> <acronym>API</acronym> matches up
on the backend to an <acronym>ODBC</acronym>-compatible data source.
This could be anything from a text file to an Oracle or
<productname>PostgreSQL</productname> <acronym>RDBMS</acronym>.
</para>
<para>
The backend access comes from <acronym>ODBC</acronym> drivers,
or vendor-specific drivers that
allow data access. <productname>psqlODBC</productname>, which is included in the <productname>PostgreSQL</> distribution, is such a driver,
along with others that are
available, such as the <productname>OpenLink</productname> <acronym>ODBC</acronym> drivers.
</para>
<para>
Once you write an <acronym>ODBC</acronym> application,
you <emphasis>should</emphasis> be able to connect to <emphasis>any</emphasis>
back-end database, regardless of the vendor, as long as the database schema
is the same.
</para>
<para>
For example, you could have <productname>MS SQL Server</productname>
and <productname>PostgreSQL</productname> servers that have
exactly the same data. Using <acronym>ODBC</acronym>,
your Windows application would make exactly the
same calls and the back-end data source would look the same (to the Windows
application).
</para>
</sect1>
<sect1 id="odbc-install">
<title>Installation</title>
<para>
In order to make use of an <acronym>ODBC</> driver there must
exist a <firstterm>driver manager</> on the system where the
<acronym>ODBC</> driver is to be used. There are two free
<acronym>ODBC</> driver managers for Unix-like operating systems
known to us: <indexterm><primary>iODBC</primary></indexterm>
<ulink url="http://www.iodbc.org"><productname>iODBC</></ulink>
and <indexterm><primary>unixODBC</primary></indexterm> <ulink
url="http://www.unixodbc.org"><productname>unixODBC</></ulink>.
Instructions for installing these driver managers are to be found
in the respective distribution. Software that provides database
access through <acronym>ODBC</acronym> should provide its own
driver manager (which may well be one of these two). Having said
that, any driver manager that you can find for your platform
should support the <productname>PostgreSQL</> <acronym>ODBC</>
driver, or any other <acronym>ODBC</> driver for that matter.
</para>
<note>
<para>
The <productname>unixODBC</> distribution ships with a
<productname>PostgreSQL</> <acronym>ODBC</> driver of its own,
which is similar to the one contained in the
<productname>PostgreSQL</> distribution. It is up to you which
one you want to use. We plan to coordinate the development of
both drivers better in the future.
</para>
</note>
<para>
To install the <acronym>ODBC</> driver you simply need to supply the
<option>--enable-odbc</> option to the <filename>configure</>
script when you are building the entire <productname>PostgreSQL</>
distribution. The library will then automatically be built and
installed with the rest of the programs. If you forget that option
or want to build the ODBC driver later you can change into the
directory <filename>src/interfaces/odbc</> and do <literal>make</>
and <literal>make install</> there.
</para>
<para>
It is also possible to build the driver to be specifically tuned
for use with <productname>iODBC</> or <productname>unixODBC</>.
This means in particular that the driver will use the driver
manager's routines to process the configuration files, which is
probably desirable since it creates a more consistent
<acronym>ODBC</> environment on your system. If you want to do
that, then supply the <filename>configure</> options
<option>--with-iodbc</> or <option>--with-unixodbc</> (but not
both).
</para>
<para>
If you build a <quote>stand-alone</quote> driver (not tied to
<productname>iODBC</> or <productname>unixODBC</>), then you can
specify where the driver should look for the configuration file
<filename>odbcinst.ini</>. By default it will be the directory
<filename>/usr/local/pgsql/etc/</>, or equivalent, depending on
what <option>--prefix</> and/or <option>--sysconfdir</> options
you supplied to <filename>configure</>. To select a specific
location outside the <productname>PostgreSQL</> installation
layout, use the <option>--with-odbcinst</> option. To be most
useful, it should be arranged that the driver and the driver
manager read the same configuration file.
</para>
<para>
<indexterm><primary>odbc.sql</></>
Additionally, you should install the ODBC catalog extensions. That will
provide a number of functions mandated by the ODBC standard that are not
supplied by <productname>PostgreSQL</> by default. The file
<filename>/usr/local/pgsql/share/odbc.sql</> (in the default installation layout)
contains the appropriate definitions, which you can install as follows:
<programlisting>
psql -d template1 -f <replaceable>LOCATION</>/odbc.sql
</programlisting>
where specifying <literal>template1</literal> as the target
database will ensure that all subsequent new databases will have
these same definitions. If for any reason you want to remove
these functions again, run the file
<filename>odbc-drop.sql</filename> through
<command>psql</command>.
</para>
</sect1>
<sect1 id="odbc-config">
<title>Configuration Files</title>
<indexterm zone="odbc-config"><primary>.odbc.ini</></>
<para>
<filename>~/.odbc.ini</filename> contains user-specified access information
for the <productname>psqlODBC</productname> driver.
The file uses conventions typical for <productname>Windows</productname>
Registry files.
</para>
<para>
The <filename>.odbc.ini</filename> file has three required sections.
The first is <literal>[ODBC Data Sources]</literal>
which is a list of arbitrary names and descriptions for each database
you wish to access. The second required section is the
Data Source Specification and there will be one of these sections
for each database.
Each section must be labeled with the name given in
<literal>[ODBC Data Sources]</literal> and must contain the following entries:
<programlisting>
Driver = <replaceable>prefix</replaceable>/lib/libpsqlodbc.so
Database = <replaceable>DatabaseName</replaceable>
Servername = localhost
Port = 5432
</programlisting>
<tip>
<para>
Remember that the <productname>PostgreSQL</productname> database name is
usually a single word, without path names of any sort.
The <productname>PostgreSQL</productname> server manages the actual access
to the database, and you need only specify the name from the client.
</para>
</tip>
Other entries may be inserted to control the format of the display.
The third required section is <literal>[ODBC]</literal>
which must contain the <literal>InstallDir</literal> keyword
and which may contain other options.
</para>
<para>
Here is an example <filename>.odbc.ini</filename> file,
showing access information for three databases:
<programlisting>
[ODBC Data Sources]
DataEntry = Read/Write Database
QueryOnly = Read-only Database
Test = Debugging Database
Default = Postgres Stripped
[DataEntry]
ReadOnly = 0
Servername = localhost
Database = Sales
[QueryOnly]
ReadOnly = 1
Servername = localhost
Database = Sales
[Test]
Debug = 1
CommLog = 1
ReadOnly = 0
Servername = localhost
Username = tgl
Password = "no$way"
Port = 5432
Database = test
[Default]
Servername = localhost
Database = tgl
Driver = /opt/postgres/current/lib/libpsqlodbc.so
[ODBC]
InstallDir = /opt/applix/axdata/axshlib
</programlisting>
</para>
</sect1>
<sect1 id="odbc-windows">
<title><productname>Windows</productname> Applications</title>
<para>
In the real world, differences in drivers and the level of
<acronym>ODBC</acronym> support
lessen the potential of <acronym>ODBC</acronym>:
<itemizedlist spacing="compact" mark="bullet">
<listitem>
<para>
Access, Delphi, and Visual Basic all support <acronym>ODBC</acronym> directly.
</para>
</listitem>
<listitem>
<para>
Under C++, such as Visual C++,
you can use the C++ <acronym>ODBC</acronym> <acronym>API</acronym>.
</para>
</listitem>
<listitem>
<para>
In Visual C++, you can use the <classname>CRecordSet</classname> class, which wraps the
<acronym>ODBC</acronym> <acronym>API</acronym>
set within an <application>MFC</application> 4.2 class. This is the easiest route if you are doing
Windows C++ development under Windows NT.
</para>
</listitem>
</itemizedlist>
</para>
<sect2>
<title>Writing Applications</title>
<para>
<quote>
If I write an application for <productname>PostgreSQL</productname>
can I write it using <acronym>ODBC</acronym> calls
to the <productname>PostgreSQL</productname> server,
or is that only when another database program
like MS SQL Server or Access needs to access the data?
</quote>
</para>
<para>
The <acronym>ODBC</acronym> <acronym>API</acronym>
is the way to go.
For <productname>Visual C++</productname> coding you can find out more at
Microsoft's web site or in your <productname>Visual C++</productname>
documentation.
</para>
<para>
Visual Basic and the other <acronym>RAD</acronym> tools have <classname>Recordset</classname> objects
that use <acronym>ODBC</acronym>
directly to access data. Using the data-aware controls, you can quickly
link to the <acronym>ODBC</acronym> back-end database
(<emphasis>very</emphasis> quickly).
</para>
<para>
Playing around with <productname>MS Access</> will help you sort this out. Try using
<menuchoice><guimenu>File</><guimenuitem>Get External Data</></menuchoice>.
</para>
<tip>
<para>
You'll have to set up a <acronym>DSN</acronym> first.
</para>
</tip>
</sect2>
</sect1>
<sect1 id="odbc-applixware">
<title><application>ApplixWare</application></title>
<indexterm zone="odbc-applixware">
<primary>Applixware</primary>
</indexterm>
<para>
<productname>Applixware</productname> has an
<acronym>ODBC</acronym> database interface
supported on at least some platforms.
<productname>Applixware</productname> 4.4.2 has been
demonstrated under Linux with <productname>PostgreSQL</productname> 7.0
using the <productname>psqlODBC</productname>
driver contained in the <productname>PostgreSQL</productname> distribution.
</para>
<sect2>
<title>Configuration</title>
<para>
<productname>Applixware</productname> must be configured correctly
in order for it to
be able to access the <productname>PostgreSQL</productname>
<acronym>ODBC</acronym> software drivers.
</para>
<procedure>
<title>Enabling <application>Applixware</application> Database Access</title>
<para>
These instructions are for the 4.4.2 release of
<productname>Applixware</productname> on <productname>Linux</productname>.
Refer to the <citetitle>Linux Sys Admin</citetitle> on-line book
for more detailed information.
</para>
<step performance="required">
<para>
You must modify <filename>axnet.cnf</filename> so that
<filename>elfodbc</filename> can
find <filename>libodbc.so</filename>
(the <acronym>ODBC</acronym> driver manager) shared library.
This library is included with the <application>Applixware</application> distribution,
but <filename>axnet.cnf</filename> needs to be modified to point to the
correct location.
</para>
<para>
As root, edit the file
<filename><replaceable>applixroot</replaceable>/applix/axdata/axnet.cnf</filename>.
</para>
<substeps>
<step performance="required">
<para>
At the bottom of <filename>axnet.cnf</filename>,
find the line that starts with
<programlisting>
#libFor elfodbc /ax/<replaceable>...</replaceable>
</programlisting>
</para>
</step>
<step performance="required">
<para>
Change line to read
<programlisting>
libFor elfodbc <replaceable>applixroot</replaceable>/applix/axdata/axshlib/lib
</programlisting>
which will tell <literal>elfodbc</literal> to look in this directory
for the <acronym>ODBC</acronym> support library.
Typically <productname>Applix</productname> is installed in
<filename>/opt</filename> so the full path would be
<filename>/opt/applix/axdata/axshlib/lib</filename>,
but if you have installed <productname>Applix</productname>
somewhere else then change the path accordingly.
</para>
</step>
</substeps>
</step>
<step performance="required">
<para>
Create <filename>.odbc.ini</filename> as
described in <xref linkend="odbc-config">. You may also want to add the flag
<programlisting>
TextAsLongVarchar=0
</programlisting>
to the database-specific portion of <filename>.odbc.ini</filename>
so that text fields will not be shown as <literal>**BLOB**</literal>.
</para>
</step>
</procedure>
<procedure>
<title>Testing <application>Applixware</application> ODBC Connections</title>
<step performance="required">
<para>
Bring up <application>Applix Data</application>
</para>
</step>
<step performance="required">
<para>
Select the <productname>PostgreSQL</productname> database of interest.
</para>
<substeps>
<step performance="required">
<para>
Select <menuchoice><guimenu>Query</guimenu><guimenuitem>Choose Server</guimenuitem></menuchoice>.
</para>
</step>
<step performance="required">
<para>
Select <guimenuitem>ODBC</guimenuitem>, and click <guibutton>Browse</guibutton>.
The database you configured in <filename>.odbc.ini</filename>
should be shown. Make sure that the <guilabel>Host:</guilabel> field
is empty (if it is not, <literal>axnet</> will try to contact <literal>axnet</> on another machine
to look for the database).
</para>
</step>
<step performance="required">
<para>
Select the database in the box that was launched by <guibutton>Browse</guibutton>,
then click <guibutton>OK</guibutton>.
</para>
</step>
<step performance="required">
<para>
Enter user name and password in the login identification dialog,
and click <guibutton>OK</guibutton>.
</para>
</step>
</substeps>
<para>
You should see <guilabel>Starting elfodbc server</guilabel>
in the lower left corner of the
data window. If you get an error dialog box, see the debugging section
below.
</para>
</step>
<step performance="required">
<para>
The <quote>Ready</quote> message will appear in the lower left corner of the data
window. This indicates that you can now enter queries.
</para>
</step>
<step performance="required">
<para>
Select a table from
<menuchoice><guimenu>Query</><guimenuitem>Choose
tables</></menuchoice>, and then select
<menuchoice><guimenu>Query</><guimenuitem>Query</></menuchoice>
to access the database. The first 50 or so rows from the table
should appear.
</para>
</step>
</procedure>
</sect2>
<sect2>
<title>Common Problems</title>
<para>
The following messages can appear while trying to make an
<acronym>ODBC</acronym> connection through
<productname>Applix Data</productname>:
<variablelist>
<varlistentry>
<term>
<computeroutput>Cannot launch gateway on server</computeroutput>
</term>
<listitem>
<para>
<literal>elfodbc</literal> can't find <filename>libodbc.so</filename>.
Check your <filename>axnet.cnf</filename>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><computeroutput>
Error from ODBC Gateway:
IM003::[iODBC][Driver Manager]Specified driver could not be loaded</computeroutput>
</term>
<listitem>
<para>
<filename>libodbc.so</filename> cannot find the driver listed in
<filename>.odbc.ini</filename>. Verify the settings.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<computeroutput>Server: Broken Pipe</computeroutput>
</term>
<listitem>
<para>
The driver process has terminated due to some other
problem. You might not have an up-to-date version
of the <productname>PostgreSQL</productname>
<acronym>ODBC</acronym> package.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<computeroutput>setuid to 256: failed to launch gateway</computeroutput>
</term>
<listitem>
<para>
The September release of <application>Applixware</application> 4.4.1 (the first release with official
<acronym>ODBC</acronym> support under Linux) shows problems when user names
exceed eight (8) characters in length.
Problem description contributed by Steve Campbell
(<email>scampbell@lear.com</email>).
</para>
</listitem>
</varlistentry>
</variablelist>
</para>
<para>
<note>
<title>Author</title>
<para>
Contributed by Steve Campbell (<email>scampbell@lear.com</email>),
1998-10-20
</para>
</note>
The <application>axnet</application> program's security system
seems a little suspect. <application>axnet</application> does things
on behalf of the user and on a true
multiuser system it really should be run with root security
(so it can read/write in each user's directory).
I would hesitate to recommend this, however, since we have no idea what
security holes this creates.
</para>
</sect2>
<sect2>
<title>Debugging <application>Applixware</application> ODBC Connections</title>
<para>
One good tool for debugging connection problems uses the Unix system
utility <application>strace</application>.
</para>
<procedure>
<title>Debugging with <command>strace</command></title>
<step performance="required">
<para>
Start <application>Applixware</application>.
</para>
</step>
<step performance="required">
<para>
Start an <application>strace</application> on
the <literal>axnet</literal> process. For example, if
<screen>
<prompt>$</prompt> <userinput>ps -aucx | grep ax</userinput>
</screen>
shows
<screen>
cary 10432 0.0 2.6 1740 392 ? S Oct 9 0:00 axnet
cary 27883 0.9 31.0 12692 4596 ? S 10:24 0:04 axmain
</screen>
</para>
<para>
Then run
<screen>
<prompt>$</prompt> <userinput>strace -f -s 1024 -p 10432</userinput>
</screen>
</para>
</step>
<step performance="required">
<para>
Check the <command>strace</command> output.
</para>
<note>
<title>Note from Cary</title>
<para>
Many of the error messages from <productname>Applixware</productname>
go to <filename>stderr</filename>,
but I'm not sure where <filename>stderr</filename>
is sent, so <command>strace</command> is the way to find out.
</para>
</note>
</step>
</procedure>
<para>
For example, after getting
a <errorname>Cannot launch gateway on server</errorname>,
I ran <command>strace</command> on <literal>axnet</literal> and got
<screen>
[pid 27947] open("/usr/lib/libodbc.so", O_RDONLY) = -1 ENOENT (No such file or directory)
[pid 27947] open("/lib/libodbc.so", O_RDONLY) = -1 ENOENT (No such file or directory)
[pid 27947] write(2, "/usr2/applix/axdata/elfodbc: can't load library 'libodbc.so'\n", 61) = -1 EIO (I/O error)
</screen>
So what is happening is that <literal>applix elfodbc</literal> is searching for <filename>libodbc.so</filename>, but it
cannot find it. That is why <filename>axnet.cnf</filename> needed to be changed.
</para>
</sect2>
<sect2>
<title>Running the <application>Applixware</application> Demo</title>
<comment>I think the condition this refers to is gone. -- petere 2002-01-07</comment>
<para>
In order to go through the
<citetitle>Applixware Data Tutorial</citetitle>, you need to create
the sample tables that the Tutorial refers to. The ELF Macro used to
create the tables tries to use a NULL condition
on many of the database columns,
and <productname>PostgreSQL</productname> does not currently allow this option.
</para>
<para>
To get around this problem, you can do the following:
</para>
<procedure>
<title>Modifying the <application>Applixware</application> Demo</title>
<step performance="required">
<para>
Copy <filename>/opt/applix/axdata/eng/Demos/sqldemo.am</filename>
to a local directory.
</para>
</step>
<step performance="required">
<para>
Edit this local copy of <filename>sqldemo.am</filename>:
</para>
<substeps>
<step performance="required">
<para>
Search for <literal>null_clause = "NULL"</literal>.
</para>
</step>
<step performance="required">
<para>
Change this to <literal>null_clause = ""</literal>.
</para>
</step>
</substeps>
</step>
<step performance="required">
<para>
Start <application>Applix Macro Editor</application>.
</para>
</step>
<step performance="required">
<para>
Open the <filename>sqldemo.am</filename> file from the <application>Macro Editor</application>.
</para>
</step>
<step performance="required">
<para>
Select <menuchoice><guimenu>File</><guimenuitem>Compile and Save</></menuchoice>.
</para>
</step>
<step performance="required">
<para>
Exit <application>Macro Editor</application>.
</para>
</step>
<step performance="required">
<para>
Start <application>Applix Data</application>.
</para>
</step>
<step performance="required">
<para>
Select <menuchoice><guimenu>*</><guimenuitem>Run Macro</guimenuitem></menuchoice>.
</para>
</step>
<step performance="required">
<para>
Enter the value <literal>sqldemo</literal>, then click <guibutton>OK</guibutton>.
</para>
<para>
You should see the progress in the status line of the data window
(in the lower left corner).
</para>
</step>
<step performance="required">
<para>
You should now be able to access the demo tables.
</para>
</step>
</procedure>
</sect2>
<sect2>
<title>Useful Macros</title>
<para>
You can add information about your
database login and password to the standard <application>Applix</application> start-up
macro file. This is an example
<filename>~/axhome/macros/login.am</filename> file:
<programlisting>
macro login
set_system_var@("sql_username@","tgl")
set_system_var@("sql_passwd@","no$way")
endmacro
</programlisting>
<caution>
<para>
You should be careful about the file protections on any file containing
user name and password information.
</para>
</caution>
</para>
</sect2>
</sect1>
</chapter>
<!-- Keep this comment at the end of the file
Local variables:
mode:sgml
sgml-omittag:t
sgml-shorttag:t
sgml-minimize-attributes:nil
sgml-always-quote-attributes:t
sgml-indent-step:1
sgml-indent-data:t
sgml-parent-document:nil
sgml-default-dtd-file:"./reference.ced"
sgml-exposed-tags:nil
sgml-local-catalogs:("/usr/lib/sgml/catalog")
sgml-local-ecat-files:nil
End:
-->

104
doc/src/sgml/recovery.sgml Normal file
View File

@ -0,0 +1,104 @@
<chapter Id="failure">
<title>Database Failures</title>
<para>
Database failures (or the possibility of such) must be assumed to be
lurking, ready to strike at some time in the future. A prudent
database administrator will plan for the inevitability of failures
of all possible kinds, and will have appropriate plans and
procedures in place <emphasis>before</emphasis> the failure occurs.
</para>
<para>
Database recovery is necessary in the event of hardware or software
failure. There are several categories of failures; some of these
require relatively minor adjustments to the database, while others
may depend on the existence of previously prepared database dumps
and other recovery data sets. It should be emphasized that if your
data is important and/or difficult to regenerate, then you should
have considered and prepared for various failure scenarios.
</para>
<sect1 id="failure-disk-full">
<title>Disk Filled</title>
<para>
A filled data disk may result in subsequent corruption of database
indexes, but not of the fundamental data tables. If the WAL files
are on the same disk (as is the case for a default configuration)
then a filled disk during database initialization may result in
corrupted or incomplete WAL files. This failure condition is
detected and the database will refuse to start up. You must free
up additional space on the disk (or move the WAL area to another
disk; see <xref linkend="wal-configuration">) and then restart the
<application>postmaster</application> to recover from this condition.
</para>
</sect1>
<sect1 id="failure-disk-failed">
<title>Disk Failed</title>
<para>
Failure of any disk (or of a logical storage device such as a RAID
subsystem) involved with an active database will require
that the database be recovered from a previously prepared database
dump. This dump must be prepared using
<application>pg_dumpall</application>, and updates to the database
occurring after the database installation was dumped will be lost.
</para>
</sect1>
<!--
<sect1>
<title>File Corrupted</title>
<para>
</para>
</sect1>
<sect1>
<title>Table Corrupted</title>
<para>
</para>
</sect1>
<sect1>
<title></title>
<para>
</para>
</sect1>
<sect1>
<title></title>
<para>
</para>
</sect1>
<sect1>
<title></title>
<para>
</para>
</sect1>
-->
</chapter>
<!-- Keep this comment at the end of the file
Local variables:
mode:sgml
sgml-omittag:nil
sgml-shorttag:t
sgml-minimize-attributes:nil
sgml-always-quote-attributes:t
sgml-indent-step:1
sgml-indent-data:t
sgml-parent-document: ("postgres.sgml" "set" "book" "chapter")
sgml-default-dtd-file:"./reference.ced"
sgml-exposed-tags:nil
sgml-local-ecat-files:nil
End:
-->

View File

@ -0,0 +1,7 @@
<!--
Update this file to propagate correct current version numbers to the
documentation. In text, use for example &version; to refer to them.
-->
<!entity version "7.3">
<!entity majorversion "7.3">

95
doc/src/sgml/y2k.sgml Normal file
View File

@ -0,0 +1,95 @@
<!--
$Header: /cvsroot/pgsql/doc/src/sgml/Attic/y2k.sgml,v 1.14 2002/01/08 20:03:58 momjian Exp $
-->
<sect1 id="y2k">
<title>Y2K Statement</title>
<note>
<title>Author</title>
<para>
Written by Thomas Lockhart
(<email>lockhart@fourpalms.org</email>)
on 1998-10-22. Updated 2000-03-31.
</para>
</note>
<para>
The <productname>PostgreSQL</productname> Global Development Group provides
the <productname>PostgreSQL</productname> software code tree as a public service,
without warranty and without liability for its behavior or performance.
However, at the time of writing:
</para>
<itemizedlist>
<listitem>
<para>
The author of this statement, a volunteer on the
<productname>PostgreSQL</productname>
support team since November, 1996, is not aware of
any problems in the <productname>PostgreSQL</productname> code base related
to time transitions around Jan 1, 2000 (Y2K).
</para>
</listitem>
<listitem>
<para>
The author of this statement is not aware of any reports of Y2K problems
uncovered in regression testing
or in other field use of recent or current versions
of <productname>PostgreSQL</productname>. We might have expected
to hear about problems if they existed, given the installed base and
the active participation of users on the support mailing lists.
</para>
</listitem>
<listitem>
<para>
To the best of the author's knowledge, the
assumptions <productname>PostgreSQL</productname>
makes about dates specified with a two-digit year
are documented in the current <citetitle>User's Guide</citetitle>
in the chapter on data types.
For two-digit years, the significant transition year is 1970, not 2000;
e.g. <literal>70-01-01</literal> is interpreted as 1970-01-01,
whereas <literal>69-01-01</literal> is interpreted as 2069-01-01.
</para>
</listitem>
<listitem>
<para>
Any Y2K problems in the underlying OS related to obtaining the
<quote>current time</quote> may propagate into apparent Y2K problems in
<productname>PostgreSQL</productname>.
</para>
</listitem>
</itemizedlist>
<para>
Refer to
<ulink url="http://www.gnu.org/software/year2000.html">The GNU Project</ulink>
and
<ulink url="http://language.perl.com/news/y2k.html">The Perl Institute</ulink>
for further discussion of Y2K issues, particularly
as it relates to open source, no fee software.
</para>
</sect1>
<!-- Keep this comment at the end of the file
Local variables:
mode:sgml
sgml-omittag:nil
sgml-shorttag:t
sgml-minimize-attributes:nil
sgml-always-quote-attributes:t
sgml-indent-step:1
sgml-indent-data:t
sgml-parent-document:nil
sgml-default-dtd-file:"./reference.ced"
sgml-exposed-tags:nil
sgml-local-catalogs:("/usr/lib/sgml/catalog")
sgml-local-ecat-files:nil
End:
-->

View File

@ -0,0 +1,467 @@
Tue Mar 06 12:05:00 GMT 2001 peter@retep.org.uk
- Removed org.postgresql.xa.Test from the JDBC EE driver as it's an old
test class and prevented it from compiling.
Fri Mar 02 10:00:00 GMT 2001 peter@retep.org.uk
- Fixed build.xml so that PGclob is not built in the JDBC1.2 driver
Fri Feb 17 18:25:00 GMT 2001 peter@retep.org.uk
- Removed the last deprecation warnings from the Java2 driver. Now only
the old examples give deprecation warnings.
- Added a new class into core that (JDK1.3+) ensures all connections are
closed when the VM terminates.
Fri Feb 17 15:11:00 GMT 2001 peter@retep.org.uk
- Reduced the object overhead in PreparedStatement by reusing the same
StringBuffer object throughout. Similarly SimpleDateStamp's are also
reused in a thread-safe manner.
- Implemented in PreparedStatement: setNull(), setDate/Time/Timestamp
using Calendar, setBlob(), setCharacterStream()
- Clob's are now implemented in ResultSet & PreparedStatement!
- Implemented a lot of DatabaseMetaData & ResultSetMetaData methods.
We have about 18 unimplemented methods left in JDBC2 at the current
time.
Wed Feb 14 17:29:00 GMT 2001 peter@retep.org.uk
- Fixed bug in LargeObject & BlobOutputStream where the stream's output
was not flushed when either the stream or the blob were closed.
- Fixed PreparedStatement.setBinaryStream() where it ignored the length
Tue Feb 13 16:33:00 GMT 2001 peter@retep.org.uk
- More TestCases implemented. Refined the test suite api's.
- Removed need for SimpleDateFormat in ResultSet.getDate() improving
performance.
- Rewrote ResultSet.getTime() so that it uses JDK api's better.
Tue Feb 13 10:25:00 GMT 2001 peter@retep.org.uk
- Added MiscTest to hold reported problems from users.
- Fixed PGMoney.
- JBuilder4/JDBCExplorer now works with Money fields. Patched Field &
ResultSet (lots of methods) for this one. Also changed cash/money to
return type DOUBLE not DECIMAL. This broke JBuilder as zero scale
BigDecimal's can't have decimal places!
- When a Statement is reused, the previous ResultSet is now closed.
- Removed deprecated call in ResultSet.getTime()
Thu Feb 08 18:53:00 GMT 2001 peter@retep.org.uk
- Changed a couple of settings in DatabaseMetaData where 7.1 now
supports those features
- Implemented the DatabaseMetaData TestCase.
Wed Feb 07 18:06:00 GMT 2001 peter@retep.org.uk
- Added comment to Connection.isClosed() explaining why we deviate from
the JDBC2 specification.
- Fixed bug where the Isolation Level is lost while in autocommit mode.
- Fixed bug where several calls to getTransactionIsolationLevel()
returned the first call's result.
Tue Feb 06 19:00:00 GMT 2001 peter@retep.org.uk
- Completed first two TestCase's for the test suite. JUnit is now
recognised by ant.
Wed Jan 31 08:46:00 GMT 2001 peter@retep.org.uk
- Some minor additions to Statement to make our own extensions more
portable.
- Statement.close() will now call ResultSet.close() rather than just
disassociating from it.
Tue Jan 30 22:24:00 GMT 2001 peter@retep.org.uk
- Fixed bug where Statement.setMaxRows() was a global setting. Now
limited to just itself.
- Changed LargeObject.read(byte[],int,int) to return the actual number
of bytes read (used to be void).
- LargeObject now supports InputStream's!
- PreparedStatement.setBinaryStream() now works!
- ResultSet.getBinaryStream() now returns an InputStream that doesn't
copy the blob into memory first!
- Connection.isClosed() now tests to see if the connection is still alive
rather than if it thinks it's alive.
Thu Jan 25 09:11:00 GMT 2001 peter@retep.org.uk
- Added an alternative constructor to PGSQLException so that debugging
some more esoteric bugs is easier. If only 1 arg is supplied and it's
of type Exception, then that Exception's stacktrace is now included.
Wed Jan 24 09:18:00 GMT 2001 peter@retep.org.uk
- Removed the 8k limit by setting it to 64k
Fri Jan 19 08:47:00 GMT 2001 peter@retep.org.uk
- Applied patch submitted by John Schutz <schutz@austin.rr.com> that
fixed a bug with ANT's SQL functions (not needed for building but nice
to have fixed).
Thu Jan 18 17:30:00 GMT 2001 peter@retep.org.uk
- Added new error message into errors.properties "postgresql.notsensitive"
This is used by jdbc2.ResultSet when a method is called that should
fetch the current value of a row from the database, refreshRow() for
example.
- These methods no longer throw the notimplemented error but the new noupdate
error. This is in preparation for the Updateable ResultSet support
which will override these methods by extending the existing class to
implement that functionality, but needed to show something other than
notimplemented:
moveToCurrentRow()
moveToInsertRow()
rowDeleted()
rowInserted()
all update*() methods, except those that took the column as a String
as they were already implemented to convert the String to an int.
- getFetchDirection() and setFetchDirection() now throw
"postgresql.notimp" as we only support one direction.
The CursorResultSet will override this when it's implemented.
- Created a new class under jdbc2 UpdateableResultSet which extends
ResultSet and overrides the relevant update methods.
This allows us to implement them easily at a later date.
- In jdbc2.Connection, the following methods are now implemented:
createStatement(type,concurrency);
getTypeMap();
setTypeMap(Map);
- The JDBC2 type mapping scheme almost complete, just needs SQLInput &
SQLOutput to be implemented.
- Removed some Statement methods that somehow appeared in Connection.
- In jdbc2.Statement()
getResultSetConcurrency()
getResultSetType()
setResultSetConcurrency()
setResultSetType()
- Finally removed the old 6.5.x driver.
Thu Jan 18 12:24:00 GMT 2001 peter@retep.org.uk
- These methods in org.postgresql.jdbc2.ResultSet are now implemented:
getBigDecimal(int) ie: without a scale (why did this get missed?)
getBlob(int)
getCharacterStream(int)
getConcurrency()
getDate(int,Calendar)
getFetchDirection()
getFetchSize()
getTime(int,Calendar)
getTimestamp(int,Calendar)
getType()
NB: Where int represents the column name, the associated version
taking a String were already implemented by calling the int
version.
- These methods no longer throw the notimplemented error but the new noupdate
error. This is in preparation for the Updateable ResultSet support
which will override these methods by extending the existing class to
implement that functionality, but needed to show something other than
notimplemented:
cancelRowUpdates()
deleteRow()
- Added new error message into errors.properties "postgresql.noupdate"
This is used by jdbc2.ResultSet when an update method is called and
the ResultSet is not updateable. A new method notUpdateable() has been
added to that class to throw this exception, keeping the binary size
down.
- Added new error message into errors.properties "postgresql.psqlnotimp"
This is used instead of unimplemented when it's a feature in the
backend that is preventing this method from being implemented.
- Removed getKeysetSize() as its not part of the ResultSet API
Thu Jan 18 09:46:00 GMT 2001 peter@retep.org.uk
- Applied modified patch from Richard Bullington-McGuire
<rbulling@microstate.com>. I had to modify it as some of the code
patched now exists in different classes, and some of it actually
patched obsolete code.
Wed Jan 17 10:19:00 GMT 2001 peter@retep.org.uk
- Updated Implementation to include both ANT & JBuilder
- Updated README to reflect the changes since 7.0
- Created jdbc.jpr file which allows JBuilder to be used to edit the
source. JBuilder _CAN_NOT_ be used to compile. You must use ANT for
that. It's only to allow JBuilders syntax checking to improve the
drivers source. Refer to Implementation for more details
Wed Dec 20 16:19:00 GMT 2000 peter@retep.org.uk
- Finished build.xml and updated Driver.java.in and buildDriver to
match how Makefile and ANT operate.
Tue Dec 19 17:30:00 GMT 2000 peter@retep.org.uk
- Finally created ant build.xml file
Mon Nov 20 08:12:00 GMT 2000 peter@retep.org.uk
- Encoding patch to Connection by wrobell@posexperts.com.pl
Tue Oct 17 15:35:00 BST 2000 petermount@maidstone.gov.uk
- Changed getTimestamp() again. This time Michael Stephenson's
<mstephenson@tirin.openworld.co.uk> solution looked far better
than the original solution put in June.
Tue Oct 10 13:12:00 BST 2000 peter@retep.org.uk
- DatabaseMetaData.supportsAlterTableWithDropColumn() as psql doesn't
support dropping of individual columns
- Merged in some last patches. Only 1 left, which may not be compatible
with jdbc1
- Merged in my old retepsql project. Makefile now includes it.
Mon Oct 02 12:30:00 BST 2000 peter@retep.org.uk
- Merged in byte[] array allocation changes submitted by Gunnar R|nning
<gunnar@candleweb.no>
Mon Sep 25 14:22:00 BST 2000 peter@retep.org.uk
- Removed the DriverClass kludge. Now the org.postgresql.Driver class
is compiled from a template file, and now has both the connection
class (ie jdbc1/jdbc2) and the current version's from Makefile.global
Thu Jul 20 16:30:00 BST 2000 petermount@it.maidstone.gov.uk
- Fixed DatabaseMetaData.getTableTypes()
Tue Jun 06 12:00:00 BST 2000 petermount@it.maidstone.gov.uk
- Added org/postgresql/DriverClass.java to the list of files removed
by make clean (it's dynamically built)
- Fixed Statement, so that the update count is valid when an SQL
DELETE operation is done.
- While fixing the update count, made it easier to get the OID of
the last insert as well. Example is in example/basic.java
Tue Jun 06 08:37:00 BST 2000 petermount@it.maidstone.gov.uk
- Removed a hardwired 8K limit on query strings
- Added some missing org.'s in Connection that prevented
the use of the geometric types.
Thu Jun 01 07:26:00 BST 2000 petermount@it.maidstone.gov.uk
- Removed timezone in getTimestamp() methods in ResultSet.
Mon May 15 22:30:00 BST 2000 peter@retep.org.uk
- Fixed the message Makefile produces after compiling. It still said
about the old Driver class, not the new package. Spotted by
Joseph Shraibman <jks@p1.selectacast.net>
Thu May 04 11:38:00 BST 2000 petermount@it.maidstone.gov.uk
- Corrected incorrect date in CHANGELOG
- Fixed the ImageViewer example
Wed May 03 16:47:00 BST 2000 petermount@it.maidstone.gov.uk
- Fixed the Makefile so that postgresql.jar is built every time
the jdbc1 or jdbc2 rules are called.
- Fixed the threadsafe example. It had problems with autocommit
Wed May 03 14:32:00 BST 2000 petermount@it.maidstone.gov.uk
- Rewrote the README file (the old one was 18 months old!)
- Added @deprecated tags to org.postgresql.jdbc2.ResultSet
to clear some warnings issued during compilation.
Wed Apr 12 22:14:00 BST 2000 peter@retep.org.uk
- Implemented the JDBC2 Blob interface, and ResultSet.getBlob().
Wed Apr 12 20:20:00 BST 2000 peter@retep.org.uk
- Fixed bug in ResultSet.absolute(). Negative rows are now supported.
- Implemented ResultSet.relative(), afterLast().
Tue Feb 1 21:40:00 GMT 2000 peter@retep.org.uk
- Finally imported the contributed javax extensions by Assaf Arkin
arkin@exoffice.com
Mon Jan 24 21:00:00 GMT 2000 peter@retep.org.uk
- Finally introduced the 7.0 additions to the core CVS repository.
- All source files are now under the org.postgresql package (previously
they were under postgresql). The package lines now changed
accordingly.
- The Makefile was rewritten so it should now work on machines that
can't handle the $( ) syntax.
- Dutch translation by Arnout Kuiper (ajkuiper@wxs.nl)
Mon Sep 13 23:56:00 BST 1999 peter@retep.org.uk
- PG_Stream.SendChar() optimised, increased default buffer size of
output stream to 8k, and introduced an 8k buffer on the input stream
Sverre H Huseby <sverrehu@online.no>
- Added a finalize() method to Connection class in both drivers so that
the connection to the backend is really closed.
- Due to many JVM's not returning a meaningful value for java.version
the decision for building the JDBC1.2 or JDBC2 driver is now a
compile time option.
- Replaced $$(cmd...) with `cmd...` in the Makefile. This should allow
the driver to compile when using shells other than Bash.
Thu Sep 9 01:18:39 MEST 1999 jens@jens.de
- fixed bug in handling of DECIMAL type
Wed Aug 4 00:25:18 CEST 1999 jens@jens.de
- updated ResultSetMetaData.getColumnDisplaySize() to return
the actual display size
- updated driver to use postgresql FE/BE-protocol version 2
Mon Aug 2 03:29:35 CEST 1999 jens@jens.de
- fixed bug in DatabaseMetaData.getPrimaryKeys()
Sun Aug 1 18:05:42 CEST 1999 jens@jens.de
- added support for getTransactionIsolation and setTransactionIsolation
Sun Jun 27 12:00:00 BST 1999
- Fixed typo in postgresql.Driver that prevented compilation
- Implemented getTimestamp() fix submitted by Philipp Matthias Hahn
<pmhahn@titan.lahn.de>
- Cleaned up some comments in Connection
Wed Jun 23 06:50:00 BST 1999
- Fixed error in errors.properties where the arguments are 0 based not
1 based
- Fixed bug in postgresql.Driver where exception is thrown, then
intercepted rather than being passed to the calling application.
- Removed the file postgresql/CallableStatement, as it's not used and
really exists in the jdbc1 & jdbc2 sub packages only.
Wed May 19 00:20:00 BST 1999
- Internationalisation now done. Surprising that there's 68 error
messages in the driver ;-)
Tue May 18 07:00:00 BST 1999
- Set the ImageViewer application to use transactions
Tue May 18 00:00:00 BST 1999
- Just after committing, I realised why internationalisation isn't
working. This is now fixed (in the Makefile).
Mon May 17 23:40:00 BST 1999
- PG_Stream.close() now attempts to send the close connection message
to the backend before closing the streams
- Added batch support in the JDBC2, supplied by Yutaka Tanida
<yutaka@marin.or.jp>
- Removed the old datestyle code. Now the driver uses only ISO.
- Removed some files in the postgresql directory still in CVS that were
moved since 6.4.x (DatabaseMetaData.java PreparedStatement.java
ResultSetMetaData.java Statement.java)
- Internationalisation of the error messages is partially implemented,
however it's not enabled as it only works when the jar file is
_not_ used, and work needs to be done.
Sun Apr 11 17:00:00 BST 1999
- getUpdateCount() now returns the actual update count (before it
simply returned 1 for everything).
- added some updates to example.basic so it would test the new update
count code.
- corrected typo in a comment in Statement.java
Mon Jan 25 19:45:00 GMT 1999
- created subfolders example/corba and example/corba/idl to hold the
new example showing how to hook CORBA and PostgreSQL via JDBC
- implemented some JDBC2 methods courtesy of Joachim.Gabler@t-online.de
Sat Jan 23 10:30:00 GMT 1999
- Changed imports in postgresql.jdbc1.ResultSetMetaData as for some
reason it didn't want to compile under jdk1.1.6
Tue Dec 29 15:45:00 GMT 1998
- Refreshed the README (which was way out of date)
Tue Dec 29 15:45:00 GMT 1998
- Finished adding the additional methods into the JDBC2 driver.
- Had to add some explicit package references for the JDK1.2 Javac to
cope with the driver
Tue Dec 29 12:40:00 GMT 1998
- Fixed package imports and some references to java.sql.ResultSet in
various files. Compiled and tested the JDBC1 driver.
Mon Dec 28 19:01:37 GMT 1998
- created a new package postgresql.jdbc2 which will contain the JDBC 2
specific classes. A similar new package (postgresql.jdbc1) has been
created to hold the JDBC 1 specific classes.
- modified Makefile to allow compilation of the JDBC 1 & 2 drivers,
with the possibility of building a dual-spec driver.
- changed the version number in postgresql.Driver to 6.5
- modified postgresql.Driver class to initiate the correct driver when
used under a 1.1 or 1.2+ JVM.
- postgresql.Connection and postgresql.jdbc2.Connection now extends the
new class postgresql.ConnectionStub, which allows us to dynamically
open the JDBC1 or JDBC2 drivers.
- enabled compilation of the driver under Win32 when using the Make
from the CygWin package (Cygnus B20.1 was used).
- To make future development easier (now we have 2 specifications to
work with) the following classes have moved from the postgresql to
the postgresql.jdbc1 package:
CallableStatement Connection
DatabaseMetaData PreparedStatement
ResultSet ResultSetMetaData
Statement
Some of these classes have common code that is not dependent on
either JDBC specification. These common code are still in the
postgresql package.
Ie: postgresql.jdbc1.Connection extends postgresql.Connection
and postgresql.jdbc2.Connection extends postgresql.Connection
Wed Oct 7 22:00:00 BST 1998
- removed synchronised from Connection.ExecSQL(). See next entry.
- added new synchronised locking in the Connection.ExecSQL() and
FastPath.fastpath() methods. They now lock against the PG_Stream
object for the connection, which now provides full Thread Safety.
- Reposted ChangeLog as it's missing from CVS.
Modifications done since 6.3.2 was released and Sun Aug 30 11:33:06 BST 1998
- Fixed PreparedStatement.setObject as it didn't handle shorts
- ResultSet.getDate() now handles null dates (returns null rather
than a NullPointerException)
- ResultSetMetaData.getPrecision() now returns 0 for VARCHAR
- Field now caches the typename->oid in a Hashtable to speed things
up. It removes the need for some unnecessary queries to the backend.
- PreparedStatement.toString() now returns the SQL statement that it
will send to the backend. Before it did nothing.
- DatabaseMetaData.getTypeInfo() now does something.
- Connection now throws an exception if either of the user or password
properties are missing, as they are required for JDBC to work.
This occasionally occurs when the client uses the properties version
of getConnection(), and is a common question on the email lists.
Sun Aug 30 11:33:06 BST 1998
- Created ChangeLog file, and entered the changes made between 6.3.2 and today
- Change version number to 6.4 in Driver.java
- Added fix to DatabaseMetaData.getTables() submitted by
Stefan Andreasen <stefan@linux.kapow.dk>
- Added fix to DatabaseMetaData.getColumns() to handle patterns
submitted by Stefan Andreasen <stefan@linux.kapow.dk>
- Set TcpNoDelay on the connection, as this gives us a 10x speed
improvement on FreeBSD (caused by a bug in their TCP Stack). They
should fix the bug before 6.4 is released, but we will keep this
in here unless it causes more problems.
Submitted by Jason Venner <jason@idiom.com>
- Removed a duplicate definition of fieldCache
- Added a more meaningful message when the connection is refused. It
now says:
Connection refused. Check that the hostname and port is
correct, and that the postmaster is running with the -i flag,
which enables TCP/IP networking.
- Removed kludge in PreparedStatement.setDate() that acted as a
temporary fix to a bug in SimpleDateFormat, as it broke date
handling in JDK 1.1.6.
- Modified PG_Stream and Connection, so that outbound data is now
buffered. This should give us a speed improvement, and reduce the
amount of network packets generated.
- Removed duplicate code and optimised PG_Stream.
- PG_Stream now returns a more meaningful message when the connection
is broken by the backend. It now returns:
The backend has broken the connection. Possibly the action you
have attempted has caused it to close.
- Removed obsolete code from Connection.
- The error message returned when the authentication scheme is unknown
has been extended. It now reads:
Authentication type ### not supported. Check that you have
configured the pg_hba.conf file to include the client's IP
address or Subnet, and is using a supported authentication
scheme.
- Connection.getMetaData() now caches the instance returned, so
multiple calls will return the same instance.
- Created a test application that tests the DatabaseMetaData and
ResultSetMetaData classes.
- Replaced getString(#).getBytes() with getBytes(#) which should speed
things up, and reduce memory usage.
- Optimised DatabaseMetaData.getProcedures(), and implemented patterns
- Fixed NullPointerExceptions thrown when a field is null (Internal
to the driver, not caused by results from the backend.
DatabaseMetaData.getProcedures() is an example of a method that
causes this):
- ResultSetMetaData.getColumnName() now returns field# where
# is the column number.
- ResultSet.getObject() fixed
- Fixed bug in psql example that was affected by null fields
- DatabaseMetaData.getTables()
- DatabaseMetaData.getPrimaryKeys() ran a query with an ambiguous field;
now fixed.
- getTypeInfo() optimised to increase speed and reduce memory usage
- ResultSetMetaData.isCurrency() optimised and is now smaller.
- Removed unnecessary code from ResultSetMetaData.getCatalogName()
and getSchemaName().
- Created new class postgresql.util.PGmoney to map the money type
- Created new class postgresql.geometric.PGline to map the line type

View File

@ -0,0 +1,199 @@
This short document is provided to help programmers through the internals of
the PostgreSQL JDBC driver.
Last update: January 17 2001 peter@retep.org.uk
build.xml
---------
As of 7.1, we now use the ANT build tool to build the driver. ANT is part of
the Apache/Jakarta project, and provides far superior build capabilities. You
can find ANT at http://jakarta.apache.org/ant/index.html and, being pure Java,
it will run on any Java platform.
So far I've tested it under JDK1.2.x & JDK1.3 (both Linux & NT) but not yet with
JDK1.1.8. Because of the latter, the Makefile still works for now, but it
should be gone by 7.2.
Anyhow, to build, simply type ant and the .jar file will be created and put into
the jars directory.
Tip: If you run ant from the sources root directory (ie: where the configure
script is located) you will find another build.xml file. It is advised to run
ant from that directory as it will then compile some auxiliary Java/JDBC
utilities that are located under the /contrib/retep directory.
Makefile
--------
Prior to 7.1, all compilation had to be done using Make. This is because there
are three versions of the driver, one for JDBC1 (for JDK 1.1.x) and the others
for JDBC2 (for JDK 1.2 or later, one standard and one enterprise).
As of 7.1, ANT is the build tool of choice. Just compare Makefile and build.xml
to see why! Make just isn't suited to Java.
Building with just the JDK
--------------------------
This is not advised, simply because you have to make sure you include the
correct classes, and because org.postgresql.Driver is built on the fly.
Also, javac won't pick up all the classes because some (org.postgresql.geometric
for example) are loaded dynamically.
org/postgresql/Driver.java.in
-----------------------------
Because there are three versions of the driver, the org.postgresql.Driver class
is built dynamically. To build correctly ANT copies the Driver.java.in file to
Driver.java replacing certain values according to the required driver.
The replaced values are of the format %VALUE%, ie: %MAJORVERSION% is replaced
with 7 in the 7.1 version of the driver.
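For illustration only, here is a hypothetical sketch of the kind of fragment the
generated Driver.java could contain once the build has substituted the tokens.
The class and field names below are assumptions, not the driver's actual source.

    // Hypothetical illustration of token substitution; in the template the
    // two constants would appear as %MAJORVERSION% and %MINORVERSION%.
    public class GeneratedVersionExample
    {
        public static final int MAJOR_VERSION = 7;   // was %MAJORVERSION%
        public static final int MINOR_VERSION = 1;   // was %MINORVERSION%

        public static void main(String[] args)
        {
            System.out.println("Driver version " + MAJOR_VERSION + "." + MINOR_VERSION);
        }
    }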
postgresql.jar
--------------
This jar file is produced by ANT, and contains the driver for your JDK platform.
If you downloaded a precompiled binary from the web, you may find that the
jar file will be named differently. These are identical to this file but are
named according to the backend and jdk versions.
The naming convention is of the form: jdbc-#.#-#.##.jar
ie: for 7.1
jdbc-7.1-1.1.jar JDBC Driver for JDK1.1.8
jdbc-7.1-1.2.jar JDBC Driver for JDK1.2 & JDK1.3
jdbc-7.1-1.2ent.jar JDBC Driver for JDK1.2 & JDK1.3 Enterprise Editions
If in the future there are any 1.3 specific classes then there will be two new
jar files.
Note: All the precompiled binaries are built under Linux.
jdbc.jpx
--------
This is a JBuilder4 project file. It's here to allow JBuilder to be used to
develop the driver, mainly for its editor features such as syntax checking,
auto-completion etc.
IMPORTANT: You CAN NOT build the driver from within JBuilder. You must use ANT.
This is because of the three versions of the JDK. If you try to use
JBuilder, it will try to build everything, and it will just not work.
Importing packages
------------------
In user code, you may have to import one or more packages, if and only if you
are using the non-JDBC extensions (like Fastpath or LargeObject).
DO NOT import the postgresql, postgresql.jdbc1 or postgresql.jdbc2 packages!
Internally, some classes will import these packages when there is a link between
them and the other packages. However, the above rule still applies: it's there
because javac becomes confused when similar class names are present in several
different packages.
However, there are places where code needs to refer to classes in the
org.postgresql package. In this case, import the individual classes, and not
the entire package.
ie: import org.postgresql.Field
NOT import org.postgresql.*
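As a minimal sketch of the rule above, the following user code imports a single
extension class individually and never touches the jdbc1/jdbc2 packages. The
JDBC URL, the credentials and the getLargeObjectAPI() accessor are assumptions
used purely for illustration.

    import java.sql.Connection;
    import java.sql.DriverManager;

    import org.postgresql.largeobject.LargeObjectManager;   // extension class, imported individually

    public class ImportExample
    {
        public static void main(String[] args) throws Exception
        {
            // Loading the class registers the driver with DriverManager.
            Class.forName("org.postgresql.Driver");

            // Assumed URL and credentials -- adjust for a real backend.
            Connection conn = DriverManager.getConnection(
                "jdbc:postgresql:template1", "user", "password");

            // Cast to the driver's own Connection class to reach the extension;
            // the accessor name getLargeObjectAPI() is an assumption here.
            LargeObjectManager lom =
                ((org.postgresql.Connection) conn).getLargeObjectAPI();

            System.out.println("Got a LargeObjectManager: " + lom);
            conn.close();
        }
    }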
Package Layout
--------------
The driver is split into several packages:
org.postgresql core classes that can be accessed by user code
org.postgresql.core core classes not normally used externally
org.postgresql.jdbc1 classes used only in implementing JDBC 1
org.postgresql.jdbc2 classes used only in implementing JDBC 2
org.postgresql.fastpath FastPath to backend functions
org.postgresql.geometric 2D Geometric types mapped to Java Objects
org.postgresql.largeobject Low level Large Object access
org.postgresql.util Utility classes
Package org.postgresql
------------------
This package holds the core classes.
Driver registers the driver when it's loaded, and determines which
Connection class (in jdbc1 or jdbc2 packages) to use when
connecting to a database.
Field Used internally to represent a Field
PG_Stream Used internally to manage the network stream.
PostgresqlDataSource
Exists in the Java2 Enterprise edition driver only and is the
enterprise equivalent to Driver
These classes contain common code that is not dependent on either
JDBC specification.
Connection Common code used in Connections, mainly Network Protocol stuff.
ResultSet Common code used in ResultSet's
Package org.postgresql.core
-----------------------
New in 7.1, this is where core classes (common to all versions) will exist. Any
new class that would have gone into org.postgresql must go in here instead.
BytePoolDim1 Handles a pool of byte[] arrays.
BytePoolDim2 Handles a pool of byte[][] arrays
MemoryPool Interface for managing MemoryPools. Not used (yet).
ObjectPool Interface for an Object Pool
SimpleObjectPool Class that implements ObjectPool and used by BytePoolDim#
Encoding Character encoding logic, mainly for Connection and PG_Stream.
Package org.postgresql.fastpath
---------------------------
Fastpath Handles executing a function on the PostgreSQL Backend
FastpathArg Defines an argument for a function call
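A hedged sketch of how these two classes fit together; the getFastpathAPI()
accessor, the getInteger() signature and the backend function name used here
are assumptions for illustration, not a confirmed API.

    import java.sql.Connection;

    import org.postgresql.fastpath.Fastpath;
    import org.postgresql.fastpath.FastpathArg;

    public class FastpathExample
    {
        // Calls the backend function lo_creat over the fastpath interface.
        // The argument value passed is illustrative only.
        public static int createLargeObject(Connection conn) throws Exception
        {
            Fastpath fp = ((org.postgresql.Connection) conn).getFastpathAPI();
            FastpathArg[] fpargs = { new FastpathArg(0) };
            return fp.getInteger("lo_creat", fpargs);
        }
    }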
Package org.postgresql.geometric
----------------------------
PGbox Maps to postgresql type box
PGcircle Maps to postgresql type circle
PGline Maps to postgresql type line
PGlseg Maps to postgresql type lseg
PGpath Maps to postgresql type path
PGpoint Maps to postgresql type point
PGpolygon Maps to postgresql type polygon
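A hedged sketch of using one of these classes from user code; the two-double
PGpoint constructor and the table name are assumptions for illustration.

    import java.sql.Connection;
    import java.sql.PreparedStatement;

    import org.postgresql.geometric.PGpoint;

    public class GeometricExample
    {
        // Binds a PGpoint to a prepared statement; assumes a table
        // test_points with a single column of type point.
        public static void insertPoint(Connection conn) throws Exception
        {
            PGpoint p = new PGpoint(1.0, 2.0);
            PreparedStatement ps =
                conn.prepareStatement("INSERT INTO test_points VALUES (?)");
            ps.setObject(1, p);
            ps.executeUpdate();
            ps.close();
        }
    }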
Package org.postgresql.jdbc1
------------------------
The classes in this package handle the JDBC 1 Specification, for JDK 1.1.x
All interfaces in the java.sql package are present here.
Package org.postgresql.jdbc2
------------------------
The classes in this package handle the JDBC 2 Specification, for JDK 1.2
All interfaces in the java.sql, and javax.sql packages are present here.
Package org.postgresql.largeobject
------------------------------
LargeObject Represents an open LargeObject
LargeObjectManager Handles the opening and deleting of LargeObjects
Package org.postgresql.util
-----------------------
PGmoney Maps to postgresql type money
PGobject Used to represent postgresql types that have no Java equivalent
PGtokenizer Helper class for the geometric types
Serialize   Used to serialise Java objects into tables, rather than Blobs
UnixCrypt Used to handle crypt authentication
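A hedged sketch of the PGobject pattern described above; the setType()/setValue()
accessors are assumptions and the type name is made up.

    import org.postgresql.util.PGobject;

    public class PGobjectExample
    {
        // Wraps a value of a backend type that has no Java equivalent.
        public static PGobject wrapValue() throws Exception
        {
            PGobject obj = new PGobject();
            obj.setType("mycustomtype");   // hypothetical backend type name
            obj.setValue("(1,2,3)");       // the value in the type's text form
            return obj;
        }

        public static void main(String[] args) throws Exception
        {
            System.out.println(wrapValue());
        }
    }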

View File

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--JBuilder XML Project-->
<project>
<property category="runtime.0" name="RunnableType" value="com.borland.jbuilder.runtime.ApplicationRunner" />
<property category="runtime.0" name="jsprunner.docbase" value="." />
<property category="runtime.0" name="jsprunner.jspfile" value="E%|/docs/java/xml/example6" />
<property category="sys" name="BackupPath" value="bak" />
<property category="sys" name="CheckStable" value="1" />
<property category="sys" name="Company" value="" />
<property category="sys" name="Copyright" value="Copyright (c) 2001" />
<property category="sys" name="DefaultPackage" value="org.postgresql.core" />
<property category="sys" name="Description" value="" />
<property category="sys" name="DocPath" value="doc" />
<property category="sys" name="ExcludeClassEnabled" value="0" />
<property category="sys" name="JDK" value="java 1.3.0-C" />
<property category="sys" name="LastTag" value="0" />
<property category="sys" name="Libraries" value="JUnit" />
<property category="sys" name="MakeStable" value="0" />
<property category="sys" name="OutPath" value="build" />
<property category="sys" name="SourcePath" value="." />
<property category="sys" name="Title" value="" />
<property category="sys" name="Version" value="1.0" />
<property category="sys" name="WorkingDirectory" value="." />
<node type="Package" name="org.postgresql.core" />
<file path="build.xml" />
<file path="CHANGELOG" />
<file path="Implementation" />
<file path="README" />
<file path="org/postgresql/jdbc2/UpdateableResultSet.java" />
</project>

View File

@ -0,0 +1,74 @@
package utils;
/*
* This little app checks to see what version of JVM is being used.
* It does this by checking first the java.vm.version property, and
* if that fails, it looks for certain classes that should be present.
*/
public class CheckVersion
{
/*
* Check for the existence of a class by attempting to load it
*/
public static boolean checkClass(String c)
{
try
{
Class.forName(c);
}
catch (Exception e)
{
return false;
}
return true;
}
/*
* This first checks java.vm.version for 1.1, 1.2 or 1.3.
*
* It writes jdbc1 to stdout for the 1.1.x VM.
*
* For 1.2 or 1.3, it checks for the existence of the javax.sql.DataSource
* interface, and if found writes enterprise to stdout. If the interface
* is not found, it writes jdbc2 to stdout.
*
* PS: It also looks for the existence of java.lang.Byte which appeared in
* JDK1.1.0 in case java.vm.version is not heeded by some JVMs.
*
* If it can't work it out, it writes huho to stdout.
*
* The make file uses the written results to determine which rule to run.
*
* Bugs: This needs thorough testing.
*/
public static void main(String args[])
{
String vmversion = System.getProperty("java.vm.version");
System.out.println("postgresql.jdbc=" + System.getProperty("postgresql.jdbc"));
// We are running a 1.1 JVM
if (vmversion.startsWith("1.1"))
{
System.out.println("jdbc1");
//System.exit(0);
}
else
// We are running a 1.2 or 1.3 JVM
if (vmversion.startsWith("1.2") ||
vmversion.startsWith("1.3") ||
checkClass("java.lang.Byte")
)
{
// Check to see if we have the standard extensions. If so, then
// we want the enterprise edition, otherwise the jdbc2 driver.
if (checkClass("javax.sql.DataSource"))
System.out.println("enterprise");
else
System.out.println("jdbc2");
//System.exit(0);
}
System.setProperty("postgresql.jdbc", "yoyo");
}
}

View File

@ -0,0 +1,47 @@
#!/bin/sh
#
# $Id: buildDriver,v 1.2 2000/12/20 16:22:49 peter Exp $
#
# This script generates the org/postgresql/Driver.java file from the template
# org/postgresql/Driver.java.in
#
# We do this because we need to include the version number from Makefile.global
# and some other goodies.
#
# This used to be in Makefile, but as it's now done three times, it's better
# to have it as a separate script.
#
# If you have any problems, please let us know ;-)
#
# Syntax: buildDriver version class edition source
#
# Where:
# version The version string from Makefile.global
# class The class implementing java.sql.Connection
# edition The driver edition being built
# source The file to build. We assume that ${source}.in exists
#
VERSION=$1
CLASS=$2
EDITION=$3
SOURCE=$4
#---------------------------------------------------------------------------
# Extract the version. This will work until version x.9 (and assuming we don't
# have 7.10 etc). We only handle 1 digit for MINORVERSION to handle things like
# 7.1devel etc
#
MAJORVERSION=`echo $VERSION | cut -f1 -d'.'`
MINORVERSION=`echo $VERSION | cut -f2 -d'.' | cut -c1`
#---------------------------------------------------------------------------
# Now finally build the driver
sed \
-e "s/@JDBCCONNECTCLASS@/$CLASS/g" \
-e "s/@VERSION@/$VERSION $EDITION/g" \
-e "s/@MAJORVERSION@/$MAJORVERSION/g" \
-e "s/@MINORVERSION@/$MINORVERSION/g" \
<${SOURCE}.in \
>$SOURCE
#---------------------------------------------------------------------------

View File

@ -0,0 +1,23 @@
#!/bin/perl
while(<>) {
chomp();
s/\t+/ /g;
if(substr($_,0,3) eq ' - ') {
print "<ul>" if !$inlist;
$inlist=1;
print "<li>".substr($_,3)."\n";
} else {
if($_ eq "" || $_ eq " ") {
print "</ul>" if $inlist;
$inlist=0;
print "<br>\n";
} elsif(substr($_,0,1) eq " ") {
print $_;
} else {
print "</ul>" if $inlist;
$inlist=0;
print "<h4>".$_."</h4>\n";
}
}
}

View File

@ -0,0 +1,532 @@
--
-- GEOMETRY
--
--
-- Points
--
SELECT '' AS four, center(f1) AS center
FROM BOX_TBL;
four | center
------+---------
| (1,1)
| (2,2)
| (2.5,3)
| (3,3)
(4 rows)
SELECT '' AS four, (@@ f1) AS center
FROM BOX_TBL;
four | center
------+---------
| (1,1)
| (2,2)
| (2.5,3)
| (3,3)
(4 rows)
SELECT '' AS six, point(f1) AS center
FROM CIRCLE_TBL;
six | center
-----+-----------
| (0,0)
| (1,2)
| (1,3)
| (1,2)
| (100,200)
| (100,0)
(6 rows)
SELECT '' AS six, (@@ f1) AS center
FROM CIRCLE_TBL;
six | center
-----+-----------
| (0,0)
| (1,2)
| (1,3)
| (1,2)
| (100,200)
| (100,0)
(6 rows)
SELECT '' AS two, (@@ f1) AS center
FROM POLYGON_TBL
WHERE (# f1) > 2;
two | center
-----+-------------------------------------
| (1.33333333333333,1.33333333333333)
| (2.33333333333333,1.33333333333333)
(2 rows)
-- "is horizontal" function
SELECT '' AS two, p1.f1
FROM POINT_TBL p1
WHERE ishorizontal(p1.f1, point '(0,0)');
two | f1
-----+---------
| (0,0)
| (-10,0)
(2 rows)
-- "is horizontal" operator
SELECT '' AS two, p1.f1
FROM POINT_TBL p1
WHERE p1.f1 ?- point '(0,0)';
two | f1
-----+---------
| (0,0)
| (-10,0)
(2 rows)
-- "is vertical" function
SELECT '' AS one, p1.f1
FROM POINT_TBL p1
WHERE isvertical(p1.f1, point '(5.1,34.5)');
one | f1
-----+------------
| (5.1,34.5)
(1 row)
-- "is vertical" operator
SELECT '' AS one, p1.f1
FROM POINT_TBL p1
WHERE p1.f1 ?| point '(5.1,34.5)';
one | f1
-----+------------
| (5.1,34.5)
(1 row)
--
-- Line segments
--
-- intersection
SELECT '' AS count, p.f1, l.s, l.s # p.f1 AS intersection
FROM LSEG_TBL l, POINT_TBL p;
ERROR: Unable to identify an operator '#' for types 'lseg' and 'point'
You will have to retype this query using an explicit cast
-- closest point
SELECT '' AS thirty, p.f1, l.s, p.f1 ## l.s AS closest
FROM LSEG_TBL l, POINT_TBL p;
thirty | f1 | s | closest
--------+------------+-------------------------------+---------------------------------------
| (0,0) | [(1,2),(3,4)] | (1,2)
| (-10,0) | [(1,2),(3,4)] | (1,2)
| (-3,4) | [(1,2),(3,4)] | (1,2)
| (5.1,34.5) | [(1,2),(3,4)] | (3,4)
| (-5,-12) | [(1,2),(3,4)] | (1,2)
| (10,10) | [(1,2),(3,4)] | (3,4)
| (0,0) | [(0,0),(6,6)] | (-0,0)
| (-10,0) | [(0,0),(6,6)] | (0,0)
| (-3,4) | [(0,0),(6,6)] | (0.5,0.5)
| (5.1,34.5) | [(0,0),(6,6)] | (6,6)
| (-5,-12) | [(0,0),(6,6)] | (0,0)
| (10,10) | [(0,0),(6,6)] | (6,6)
| (0,0) | [(10,-10),(-3,-4)] | (-2.04878048780488,-4.4390243902439)
| (-10,0) | [(10,-10),(-3,-4)] | (-3,-4)
| (-3,4) | [(10,-10),(-3,-4)] | (-3,-4)
| (5.1,34.5) | [(10,-10),(-3,-4)] | (-3,-4)
| (-5,-12) | [(10,-10),(-3,-4)] | (-1.60487804878049,-4.64390243902439)
| (10,10) | [(10,-10),(-3,-4)] | (2.39024390243902,-6.48780487804878)
| (0,0) | [(-1000000,200),(300000,-40)] | (0.0028402365895872,15.384614860264)
| (-10,0) | [(-1000000,200),(300000,-40)] | (-9.99715942258202,15.3864610140472)
| (-3,4) | [(-1000000,200),(300000,-40)] | (-2.99789812267519,15.3851688427303)
| (5.1,34.5) | [(-1000000,200),(300000,-40)] | (5.09647083221496,15.3836744976925)
| (-5,-12) | [(-1000000,200),(300000,-40)] | (-4.99494420845634,15.3855375281616)
| (10,10) | [(-1000000,200),(300000,-40)] | (10.000993741978,15.3827690473092)
| (0,0) | [(11,22),(33,44)] | (11,22)
| (-10,0) | [(11,22),(33,44)] | (11,22)
| (-3,4) | [(11,22),(33,44)] | (11,22)
| (5.1,34.5) | [(11,22),(33,44)] | (14.3,25.3)
| (-5,-12) | [(11,22),(33,44)] | (11,22)
| (10,10) | [(11,22),(33,44)] | (11,22)
(30 rows)
--
-- Lines
--
--
-- Boxes
--
SELECT '' as six, box(f1) AS box FROM CIRCLE_TBL;
six | box
-----+----------------------------------------------------------------------------
| (2.12132034355964,2.12132034355964),(-2.12132034355964,-2.12132034355964)
| (71.7106781186547,72.7106781186547),(-69.7106781186547,-68.7106781186547)
| (4.53553390593274,6.53553390593274),(-2.53553390593274,-0.535533905932737)
| (3.12132034355964,4.12132034355964),(-1.12132034355964,-0.121320343559642)
| (107.071067811865,207.071067811865),(92.9289321881345,192.928932188135)
| (170.710678118655,70.7106781186547),(29.2893218813453,-70.7106781186547)
(6 rows)
-- translation
SELECT '' AS twentyfour, b.f1 + p.f1 AS translation
FROM BOX_TBL b, POINT_TBL p;
twentyfour | translation
------------+-------------------------
| (2,2),(0,0)
| (-8,2),(-10,0)
| (-1,6),(-3,4)
| (7.1,36.5),(5.1,34.5)
| (-3,-10),(-5,-12)
| (12,12),(10,10)
| (3,3),(1,1)
| (-7,3),(-9,1)
| (0,7),(-2,5)
| (8.1,37.5),(6.1,35.5)
| (-2,-9),(-4,-11)
| (13,13),(11,11)
| (2.5,3.5),(2.5,2.5)
| (-7.5,3.5),(-7.5,2.5)
| (-0.5,7.5),(-0.5,6.5)
| (7.6,38),(7.6,37)
| (-2.5,-8.5),(-2.5,-9.5)
| (12.5,13.5),(12.5,12.5)
| (3,3),(3,3)
| (-7,3),(-7,3)
| (0,7),(0,7)
| (8.1,37.5),(8.1,37.5)
| (-2,-9),(-2,-9)
| (13,13),(13,13)
(24 rows)
SELECT '' AS twentyfour, b.f1 - p.f1 AS translation
FROM BOX_TBL b, POINT_TBL p;
twentyfour | translation
------------+---------------------------
| (2,2),(0,0)
| (12,2),(10,0)
| (5,-2),(3,-4)
| (-3.1,-32.5),(-5.1,-34.5)
| (7,14),(5,12)
| (-8,-8),(-10,-10)
| (3,3),(1,1)
| (13,3),(11,1)
| (6,-1),(4,-3)
| (-2.1,-31.5),(-4.1,-33.5)
| (8,15),(6,13)
| (-7,-7),(-9,-9)
| (2.5,3.5),(2.5,2.5)
| (12.5,3.5),(12.5,2.5)
| (5.5,-0.5),(5.5,-1.5)
| (-2.6,-31),(-2.6,-32)
| (7.5,15.5),(7.5,14.5)
| (-7.5,-6.5),(-7.5,-7.5)
| (3,3),(3,3)
| (13,3),(13,3)
| (6,-1),(6,-1)
| (-2.1,-31.5),(-2.1,-31.5)
| (8,15),(8,15)
| (-7,-7),(-7,-7)
(24 rows)
-- scaling and rotation
SELECT '' AS twentyfour, b.f1 * p.f1 AS rotation
FROM BOX_TBL b, POINT_TBL p;
twentyfour | rotation
------------+-----------------------------
| (0,0),(0,0)
| (-0,0),(-20,-20)
| (-0,2),(-14,0)
| (0,79.2),(-58.8,0)
| (14,-0),(0,-34)
| (0,40),(0,0)
| (0,0),(0,0)
| (-10,-10),(-30,-30)
| (-7,3),(-21,1)
| (-29.4,118.8),(-88.2,39.6)
| (21,-17),(7,-51)
| (0,60),(0,20)
| (0,0),(0,0)
| (-25,-25),(-25,-35)
| (-17.5,2.5),(-21.5,-0.5)
| (-73.5,104.1),(-108,99)
| (29.5,-42.5),(17.5,-47.5)
| (0,60),(-10,50)
| (0,0),(0,0)
| (-30,-30),(-30,-30)
| (-21,3),(-21,3)
| (-88.2,118.8),(-88.2,118.8)
| (21,-51),(21,-51)
| (0,60),(0,60)
(24 rows)
SELECT '' AS twenty, b.f1 / p.f1 AS rotation
FROM BOX_TBL b, POINT_TBL p
WHERE (p.f1 <-> point '(0,0)') >= 1;
twenty | rotation
--------+-----------------------------------------------------------------------------------
| (0,-0),(-0.2,-0.2)
| (-0.1,-0.1),(-0.3,-0.3)
| (-0.25,-0.25),(-0.25,-0.35)
| (-0.3,-0.3),(-0.3,-0.3)
| (0.08,-0),(0,-0.56)
| (0.12,-0.28),(0.04,-0.84)
| (0.26,-0.7),(0.1,-0.82)
| (0.12,-0.84),(0.12,-0.84)
| (0.0651176557643925,0),(0,-0.0483449262493217)
| (0.0976764836465887,-0.0241724631246608),(0.0325588278821962,-0.0725173893739825)
| (0.109762715208919,-0.0562379754328844),(0.0813970697054906,-0.0604311578116521)
| (0.0976764836465887,-0.0725173893739825),(0.0976764836465887,-0.0725173893739825)
| (-0,0.0828402366863905),(-0.201183431952663,0)
| (-0.100591715976331,0.124260355029586),(-0.301775147928994,0.0414201183431953)
| (-0.251479289940828,0.103550295857988),(-0.322485207100592,0.0739644970414201)
| (-0.301775147928994,0.124260355029586),(-0.301775147928994,0.124260355029586)
| (0.2,0),(0,0)
| (0.3,0),(0.1,0)
| (0.3,0.05),(0.25,0)
| (0.3,0),(0.3,0)
(20 rows)
--
-- Paths
--
SET geqo TO 'off';
SELECT '' AS eight, npoints(f1) AS npoints, f1 AS path FROM PATH_TBL;
eight | npoints | path
-------+---------+---------------------------
| 2 | [(1,2),(3,4)]
| 2 | ((1,2),(3,4))
| 4 | [(0,0),(3,0),(4,5),(1,6)]
| 2 | ((1,2),(3,4))
| 2 | ((1,2),(3,4))
| 2 | [(1,2),(3,4)]
| 2 | [(11,12),(13,14)]
| 2 | ((11,12),(13,14))
(8 rows)
SELECT '' AS four, path(f1) FROM POLYGON_TBL;
four | path
------+---------------------
| ((2,0),(2,4),(0,0))
| ((3,1),(3,3),(1,0))
| ((0,0))
| ((0,1),(0,1))
(4 rows)
-- translation
SELECT '' AS eight, p1.f1 + point '(10,10)' AS dist_add
FROM PATH_TBL p1;
eight | dist_add
-------+-----------------------------------
| [(11,12),(13,14)]
| ((11,12),(13,14))
| [(10,10),(13,10),(14,15),(11,16)]
| ((11,12),(13,14))
| ((11,12),(13,14))
| [(11,12),(13,14)]
| [(21,22),(23,24)]
| ((21,22),(23,24))
(8 rows)
-- scaling and rotation
SELECT '' AS eight, p1.f1 * point '(2,-1)' AS dist_mul
FROM PATH_TBL p1;
eight | dist_mul
-------+------------------------------
| [(4,3),(10,5)]
| ((4,3),(10,5))
| [(0,0),(6,-3),(13,6),(8,11)]
| ((4,3),(10,5))
| ((4,3),(10,5))
| [(4,3),(10,5)]
| [(34,13),(40,15)]
| ((34,13),(40,15))
(8 rows)
RESET geqo;
--
-- Polygons
--
-- containment
SELECT '' AS twentyfour, p.f1, poly.f1, poly.f1 ~ p.f1 AS contains
FROM POLYGON_TBL poly, POINT_TBL p;
twentyfour | f1 | f1 | contains
------------+------------+---------------------+----------
| (0,0) | ((2,0),(2,4),(0,0)) | t
| (-10,0) | ((2,0),(2,4),(0,0)) | f
| (-3,4) | ((2,0),(2,4),(0,0)) | f
| (5.1,34.5) | ((2,0),(2,4),(0,0)) | f
| (-5,-12) | ((2,0),(2,4),(0,0)) | f
| (10,10) | ((2,0),(2,4),(0,0)) | f
| (0,0) | ((3,1),(3,3),(1,0)) | f
| (-10,0) | ((3,1),(3,3),(1,0)) | f
| (-3,4) | ((3,1),(3,3),(1,0)) | f
| (5.1,34.5) | ((3,1),(3,3),(1,0)) | f
| (-5,-12) | ((3,1),(3,3),(1,0)) | f
| (10,10) | ((3,1),(3,3),(1,0)) | f
| (0,0) | ((0,0)) | t
| (-10,0) | ((0,0)) | f
| (-3,4) | ((0,0)) | f
| (5.1,34.5) | ((0,0)) | f
| (-5,-12) | ((0,0)) | f
| (10,10) | ((0,0)) | f
| (0,0) | ((0,1),(0,1)) | f
| (-10,0) | ((0,1),(0,1)) | f
| (-3,4) | ((0,1),(0,1)) | f
| (5.1,34.5) | ((0,1),(0,1)) | f
| (-5,-12) | ((0,1),(0,1)) | f
| (10,10) | ((0,1),(0,1)) | f
(24 rows)
SELECT '' AS twentyfour, p.f1, poly.f1, p.f1 @ poly.f1 AS contained
FROM POLYGON_TBL poly, POINT_TBL p;
twentyfour | f1 | f1 | contained
------------+------------+---------------------+-----------
| (0,0) | ((2,0),(2,4),(0,0)) | t
| (-10,0) | ((2,0),(2,4),(0,0)) | f
| (-3,4) | ((2,0),(2,4),(0,0)) | f
| (5.1,34.5) | ((2,0),(2,4),(0,0)) | f
| (-5,-12) | ((2,0),(2,4),(0,0)) | f
| (10,10) | ((2,0),(2,4),(0,0)) | f
| (0,0) | ((3,1),(3,3),(1,0)) | f
| (-10,0) | ((3,1),(3,3),(1,0)) | f
| (-3,4) | ((3,1),(3,3),(1,0)) | f
| (5.1,34.5) | ((3,1),(3,3),(1,0)) | f
| (-5,-12) | ((3,1),(3,3),(1,0)) | f
| (10,10) | ((3,1),(3,3),(1,0)) | f
| (0,0) | ((0,0)) | t
| (-10,0) | ((0,0)) | f
| (-3,4) | ((0,0)) | f
| (5.1,34.5) | ((0,0)) | f
| (-5,-12) | ((0,0)) | f
| (10,10) | ((0,0)) | f
| (0,0) | ((0,1),(0,1)) | f
| (-10,0) | ((0,1),(0,1)) | f
| (-3,4) | ((0,1),(0,1)) | f
| (5.1,34.5) | ((0,1),(0,1)) | f
| (-5,-12) | ((0,1),(0,1)) | f
| (10,10) | ((0,1),(0,1)) | f
(24 rows)
SELECT '' AS four, npoints(f1) AS npoints, f1 AS polygon
FROM POLYGON_TBL;
four | npoints | polygon
------+---------+---------------------
| 3 | ((2,0),(2,4),(0,0))
| 3 | ((3,1),(3,3),(1,0))
| 1 | ((0,0))
| 2 | ((0,1),(0,1))
(4 rows)
SELECT '' AS four, polygon(f1)
FROM BOX_TBL;
four | polygon
------+-------------------------------------------
| ((0,0),(0,2),(2,2),(2,0))
| ((1,1),(1,3),(3,3),(3,1))
| ((2.5,2.5),(2.5,3.5),(2.5,3.5),(2.5,2.5))
| ((3,3),(3,3),(3,3),(3,3))
(4 rows)
SELECT '' AS four, polygon(f1)
FROM PATH_TBL WHERE isclosed(f1);
four | polygon
------+-------------------
| ((1,2),(3,4))
| ((1,2),(3,4))
| ((1,2),(3,4))
| ((11,12),(13,14))
(4 rows)
SELECT '' AS four, f1 AS open_path, polygon( pclose(f1)) AS polygon
FROM PATH_TBL
WHERE isopen(f1);
four | open_path | polygon
------+---------------------------+---------------------------
| [(1,2),(3,4)] | ((1,2),(3,4))
| [(0,0),(3,0),(4,5),(1,6)] | ((0,0),(3,0),(4,5),(1,6))
| [(1,2),(3,4)] | ((1,2),(3,4))
| [(11,12),(13,14)] | ((11,12),(13,14))
(4 rows)
-- convert circles to polygons using the default number of points
SELECT '' AS six, polygon(f1)
FROM CIRCLE_TBL;
six | polygon
-----+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
| ((-3,0),(-2.59807621135076,1.50000000000442),(-1.49999999999116,2.59807621135842),(1.53102359078377e-11,3),(1.50000000001768,2.59807621134311),(2.59807621136607,1.4999999999779),(3,-3.06204718156754e-11),(2.59807621133545,-1.50000000003094),(1.49999999996464,-2.59807621137373),(-4.59307077235131e-11,-3),(-1.5000000000442,-2.5980762113278),(-2.59807621138138,-1.49999999995138))
| ((-99,2),(-85.6025403783588,52.0000000001473),(-48.9999999997054,88.602540378614),(1.00000000051034,102),(51.0000000005893,88.6025403781036),(87.6025403788692,51.9999999992634),(101,1.99999999897932),(87.6025403778485,-48.0000000010313),(50.9999999988214,-84.6025403791243),(0.999999998468976,-98),(-49.0000000014732,-84.6025403775933),(-85.6025403793795,-47.9999999983795))
| ((-4,3),(-3.33012701891794,5.50000000000737),(-1.49999999998527,7.3301270189307),(1.00000000002552,8),(3.50000000002946,7.33012701890518),(5.33012701894346,5.49999999996317),(6,2.99999999994897),(5.33012701889242,0.499999999948437),(3.49999999994107,-1.33012701895622),(0.999999999923449,-2),(-1.50000000007366,-1.33012701887967),(-3.33012701896897,0.500000000081028))
| ((-2,2),(-1.59807621135076,3.50000000000442),(-0.499999999991161,4.59807621135842),(1.00000000001531,5),(2.50000000001768,4.59807621134311),(3.59807621136607,3.4999999999779),(4,1.99999999996938),(3.59807621133545,0.499999999969062),(2.49999999996464,-0.598076211373729),(0.999999999954069,-1),(-0.500000000044197,-0.598076211327799),(-1.59807621138138,0.500000000048616))
| ((90,200),(91.3397459621641,205.000000000015),(95.0000000000295,208.660254037861),(100.000000000051,210),(105.000000000059,208.66025403781),(108.660254037887,204.999999999926),(110,199.999999999898),(108.660254037785,194.999999999897),(104.999999999882,191.339745962088),(99.9999999998469,190),(94.9999999998527,191.339745962241),(91.3397459620621,195.000000000162))
| ((0,0),(13.3974596216412,50.0000000001473),(50.0000000002946,86.602540378614),(100.00000000051,100),(150.000000000589,86.6025403781036),(186.602540378869,49.9999999992634),(200,-1.02068239385585e-09),(186.602540377848,-50.0000000010313),(149.999999998821,-86.6025403791243),(99.999999998469,-100),(49.9999999985268,-86.6025403775933),(13.3974596206205,-49.9999999983795))
(6 rows)
-- convert the circle to an 8-point polygon
SELECT '' AS six, polygon(8, f1)
FROM CIRCLE_TBL;
six | polygon
-----+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
| ((-3,0),(-2.12132034355423,2.12132034356506),(1.53102359078377e-11,3),(2.12132034357588,2.1213203435434),(3,-3.06204718156754e-11),(2.12132034353258,-2.12132034358671),(-4.59307077235131e-11,-3),(-2.12132034359753,-2.12132034352175))
| ((-99,2),(-69.7106781184743,72.7106781188352),(1.00000000051034,102),(71.710678119196,72.7106781181134),(101,1.99999999897932),(71.7106781177526,-68.7106781195569),(0.999999998468976,-98),(-69.7106781199178,-68.7106781173917))
| ((-4,3),(-2.53553390592372,6.53553390594176),(1.00000000002552,8),(4.5355339059598,6.53553390590567),(6,2.99999999994897),(4.53553390588763,-0.535533905977846),(0.999999999923449,-2),(-2.53553390599589,-0.535533905869586))
| ((-2,2),(-1.12132034355423,4.12132034356506),(1.00000000001531,5),(3.12132034357588,4.1213203435434),(4,1.99999999996938),(3.12132034353258,-0.121320343586707),(0.999999999954069,-1),(-1.12132034359753,-0.121320343521752))
| ((90,200),(92.9289321881526,207.071067811884),(100.000000000051,210),(107.07106781192,207.071067811811),(110,199.999999999898),(107.071067811775,192.928932188044),(99.9999999998469,190),(92.9289321880082,192.928932188261))
| ((0,0),(29.2893218815257,70.7106781188352),(100.00000000051,100),(170.710678119196,70.7106781181134),(200,-1.02068239385585e-09),(170.710678117753,-70.7106781195569),(99.999999998469,-100),(29.2893218800822,-70.7106781173917))
(6 rows)
--
-- Circles
--
SELECT '' AS six, circle(f1, 50.0)
FROM POINT_TBL;
six | circle
-----+-----------------
| <(0,0),50>
| <(-10,0),50>
| <(-3,4),50>
| <(5.1,34.5),50>
| <(-5,-12),50>
| <(10,10),50>
(6 rows)
SELECT '' AS four, circle(f1)
FROM BOX_TBL;
four | circle
------+-------------------------
| <(1,1),1.4142135623731>
| <(2,2),1.4142135623731>
| <(2.5,3),0.5>
| <(3,3),0>
(4 rows)
SELECT '' AS two, circle(f1)
FROM POLYGON_TBL
WHERE (# f1) >= 3;
two | circle
-----+--------------------------------------------------------
| <(1.33333333333333,1.33333333333333),2.04168905063636>
| <(2.33333333333333,1.33333333333333),1.47534300379185>
(2 rows)
SELECT '' AS twentyfour, c1.f1 AS circle, p1.f1 AS point, (p1.f1 <-> c1.f1) AS distance
FROM CIRCLE_TBL c1, POINT_TBL p1
WHERE (p1.f1 <-> c1.f1) > 0
ORDER BY distance, circle, point using <<;
twentyfour | circle | point | distance
------------+----------------+------------+------------------
| <(100,0),100> | (5.1,34.5) | 0.97653192697797
| <(1,2),3> | (-3,4) | 1.47213595499958
| <(0,0),3> | (-3,4) | 2
| <(100,0),100> | (-3,4) | 3.07764064044152
| <(100,0),100> | (-5,-12) | 5.68348972285122
| <(1,3),5> | (-10,0) | 6.40175425099138
| <(1,3),5> | (10,10) | 6.40175425099138
| <(0,0),3> | (-10,0) | 7
| <(1,2),3> | (-10,0) | 8.18033988749895
| <(1,2),3> | (10,10) | 9.0415945787923
| <(0,0),3> | (-5,-12) | 10
| <(100,0),100> | (-10,0) | 10
| <(0,0),3> | (10,10) | 11.142135623731
| <(1,3),5> | (-5,-12) | 11.1554944214035
| <(1,2),3> | (-5,-12) | 12.2315462117278
| <(1,3),5> | (5.1,34.5) | 26.7657047773223
| <(1,2),3> | (5.1,34.5) | 29.757594539282
| <(0,0),3> | (5.1,34.5) | 31.8749193547455
| <(100,200),10> | (5.1,34.5) | 180.778038568384
| <(100,200),10> | (10,10) | 200.237960416286
| <(100,200),10> | (-3,4) | 211.415898254845
| <(100,200),10> | (0,0) | 213.606797749979
| <(100,200),10> | (-10,0) | 218.254244210267
| <(100,200),10> | (-5,-12) | 226.577682802077
(24 rows)