commit-gnue

r5920 - in trunk/gnue-common: . src/datasources src/datasources/drivers/


From: johannes
Subject: r5920 - in trunk/gnue-common: . src/datasources src/datasources/drivers/Base src/datasources/drivers/Base/Schema src/datasources/drivers/Base/Schema/Creation src/datasources/drivers/DBSIG2 src/datasources/drivers/DBSIG2/Schema src/datasources/drivers/DBSIG2/Schema/Creation src/datasources/drivers/interbase/Schema/Creation src/datasources/drivers/interbase/Schema/Discovery src/datasources/drivers/interbase/interbase src/datasources/drivers/mysql/Schema/Creation src/datasources/drivers/mysql/Schema/Discovery src/datasources/drivers/mysql/mysql src/datasources/drivers/postgresql/Base src/datasources/drivers/postgresql/Schema/Creation src/datasources/drivers/postgresql/Schema/Discovery src/schema/scripter
Date: Wed, 7 Jul 2004 17:14:00 -0500 (CDT)

Author: johannes
Date: 2004-06-21 13:11:52 -0500 (Mon, 21 Jun 2004)
New Revision: 5920

Added:
   trunk/gnue-common/src/datasources/drivers/Base/Schema/
   trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation/
   trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation/Creation.py
   trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation/__init__.py
   trunk/gnue-common/src/datasources/drivers/Base/Schema/__init__.py
   trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/
   trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation/
   trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation/Creation.py
   trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation/__init__.py
   trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/__init__.py
   trunk/gnue-common/src/datasources/drivers/interbase/Schema/Creation/Creation.py
   trunk/gnue-common/src/datasources/drivers/mysql/Schema/Creation/Creation.py
   trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Creation/Creation.py
Removed:
   trunk/gnue-common/src/schema/scripter/Definition.py
   trunk/gnue-common/src/schema/scripter/processors/
Modified:
   trunk/gnue-common/BUGS
   trunk/gnue-common/src/datasources/GConnections.py
   trunk/gnue-common/src/datasources/GDataSource.py
   trunk/gnue-common/src/datasources/drivers/Base/Connection.py
   trunk/gnue-common/src/datasources/drivers/Base/DataObject.py
   trunk/gnue-common/src/datasources/drivers/Base/RecordSet.py
   trunk/gnue-common/src/datasources/drivers/Base/ResultSet.py
   trunk/gnue-common/src/datasources/drivers/Base/__init__.py
   trunk/gnue-common/src/datasources/drivers/DBSIG2/Connection.py
   trunk/gnue-common/src/datasources/drivers/interbase/Schema/Creation/__init__.py
   trunk/gnue-common/src/datasources/drivers/interbase/Schema/Discovery/Introspection.py
   trunk/gnue-common/src/datasources/drivers/interbase/interbase/Connection.py
   trunk/gnue-common/src/datasources/drivers/mysql/Schema/Creation/__init__.py
   trunk/gnue-common/src/datasources/drivers/mysql/Schema/Discovery/Introspection.py
   trunk/gnue-common/src/datasources/drivers/mysql/mysql/Connection.py
   trunk/gnue-common/src/datasources/drivers/postgresql/Base/Connection.py
   trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Creation/__init__.py
   trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Discovery/Introspection.py
   trunk/gnue-common/src/schema/scripter/Scripter.py
Log:
* Added schema-creation support to the db-drivers
* Changed gnue-schema to support this new capability. gnue-schema no longer
  supports creating HTML files.
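
As a rough illustration of what this adds, the sketch below builds a single
table definition in the dictionary format the new Creation classes expect and
asks the connection for the schema-modification code only. The definition keys
are taken from the self-test at the bottom of the new DBSIG2 Creation module;
the 'connection' object is assumed to be an already-opened GNUe connection
whose driver ships a schema creator, so treat this as a sketch rather than a
tested recipe.

  # Hedged sketch only -- 'connection' is assumed to be an established GNUe
  # connection (see the GConnections.py hunk below for how schemaCreator is
  # attached to it).
  tabledef = {'name': 'address_country',
              'fields': [{'name': 'gnue_id', 'type': 'string',
                          'length': 32, 'nullable': False},
                         {'name': 'address_code', 'type': 'string',
                          'length': 2, 'nullable': True}],
              'primarykey': {'name': 'pk_address_country',
                             'fields': ['gnue_id']}}

  # updateSchema () takes a sequence of such definitions; with codeOnly = True
  # it returns the (prologue, body, epilogue) tuple without touching the db.
  prologue, body, epilogue = connection.updateSchema ([tabledef], codeOnly = True)
  for statement in prologue + body + epilogue:
    print statement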


Modified: trunk/gnue-common/BUGS
===================================================================
--- trunk/gnue-common/BUGS      2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/BUGS      2004-06-21 18:11:52 UTC (rev 5920)
@@ -5,7 +5,15 @@
 * Different drivers return different Python datatypes for the same type of row
   (for example for date, time, and datetime fields)
 
+gnue-schema
+-----------
 
+* If the flag 'file-only' is set and multiple gsd files are processed in a
+  batch, overlapping table definitions won't be detected properly. For example,
+  if a.gsd contains a table 'foo' and b.gsd also contains a table 'foo' which
+  does not exist in the database schema, the output file would contain the
+  statement 'create table foo' twice.
+
 rpc
 ---
 

Modified: trunk/gnue-common/src/datasources/GConnections.py
===================================================================
--- trunk/gnue-common/src/datasources/GConnections.py   2004-06-18 22:11:34 UTC 
(rev 5919)
+++ trunk/gnue-common/src/datasources/GConnections.py   2004-06-21 18:11:52 UTC 
(rev 5920)
@@ -35,6 +35,7 @@
 #
 # HISTORY:
 #
+# $Id: $
 
 from ConfigParser import *
 import sys, string, copy, netrc
@@ -436,6 +437,13 @@
       behavior = connection.defaultBehavior
     connection.introspector = behavior(connection)
 
+    # Create the schema creator instance
+    if hasattr (connection, 'defaultCreator'):
+      connection.schemaCreator = connection.defaultCreator (connection,
+                                                       connection.introspector)
+    else:
+      connection.schemaCreator = None
+
     # Done
     connection.__connected = 1
 


Property changes on: trunk/gnue-common/src/datasources/GConnections.py
___________________________________________________________________
Name: svn:keywords
   + +Id

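For context: the hunk above only instantiates a creator when the driver
provides one, by calling connection.defaultCreator (connection,
connection.introspector). A driver therefore opts in by pointing a
defaultCreator attribute at its Creation class, analogous to the existing
defaultBehavior used for introspection. The fragment below is a hypothetical
driver-side sketch of that wiring, not code taken from this commit.

  # Hypothetical driver-side sketch (not part of this commit); GConnections
  # instantiates defaultCreator (connection, introspector) as schemaCreator.
  from gnue.common.datasources.drivers.DBSIG2.Schema.Creation import Creation

  class ExampleConnection:
    defaultCreator = Creation.Creation
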
Modified: trunk/gnue-common/src/datasources/GDataSource.py
===================================================================
--- trunk/gnue-common/src/datasources/GDataSource.py    2004-06-18 22:11:34 UTC 
(rev 5919)
+++ trunk/gnue-common/src/datasources/GDataSource.py    2004-06-21 18:11:52 UTC 
(rev 5920)
@@ -28,6 +28,7 @@
 #
 # HISTORY:
 #
+# $Id: $
 
 from gnue.common.apps import GDebug
 from gnue.common.datasources import GDataObjects
@@ -634,4 +635,8 @@
     self._datasourceDictionary={}
     self._toplevelParent = self._type
 
+  def getIntrospector (self):
+    return self._dataObject._connection.introspector
 
+  def getSchemaCreator (self):
+    return self._dataObject._connection.schemaCreator


Property changes on: trunk/gnue-common/src/datasources/GDataSource.py
___________________________________________________________________
Name: svn:keywords
   + +Id

Modified: trunk/gnue-common/src/datasources/drivers/Base/Connection.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/Base/Connection.py        
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/Base/Connection.py        
2004-06-21 18:11:52 UTC (rev 5920)
@@ -25,7 +25,10 @@
 #
 # NOTES:
 #
+# $Id$
 
+import copy
+
 __all__ = ['Connection']
 
 
@@ -69,3 +72,101 @@
 
   def connect(self, connectData):
     pass
+
+  def updateSchema (self, definition, codeOnly = False):
+    """
+    This function modifies the database schema according to the given
+    definition, which is a sequence of table definitions (@see:
+    Schema.Creation.Creation), one dictionary per table.
+
+    @param definition: sequence of table definitions
+    @param codeOnly: if TRUE, only the code will be generated to modify the
+        schema, no actions take place.
+    @return: a tuple of three sequences (prologue, body, epilogue) holding the
+        code to perform all schema modifications needed. These sequences should
+        be executed in this order to successfully create the schema.
+    """
+
+    result = ([], [], [])
+
+    if self.schemaCreator is None:
+      return result
+
+    workingSet    = copy.copy (definition)
+    constraintSet = {}
+
+    # before any actions are performed, validate the given definitions
+    for table in definition:
+      self.schemaCreator.validate (table)
+
+    # in a first run we remove all constraint definitions from the tables, so
+    # we can add or alter all tables without having trouble with the order of
+    # occurrence.
+    for table in workingSet:
+      # Do we have already a table with that name?
+      res = self.introspector.find (name = table ['name'])
+
+      if res is not None:
+        method = self.schemaCreator.modifyTable
+        existingFields = [f.name for f in res [0].fields ()]
+
+        # keep only new fields
+        keep = []
+        for field in table ['fields']:
+          if not field ['name'] in existingFields:
+            keep.append (field)
+
+        table ['fields'] = keep
+
+        # on updates of a table we cannot use a primary key
+        if table.has_key ('primarykey'):
+          del table ['primarykey']
+
+        # we create an index on a table update only if it contains new fields
+        if table.has_key ('indices'):
+          keep = []
+          for index in table ['indices']:
+            for field in index ['fields']:
+              if not field in existingFields:
+                keep.append (index)
+                break
+
+          table ['indices'] = keep
+
+        # we create a constraint on a table update only if it contains new
+        # fields
+        if table.has_key ('constraints'):
+          keep = []
+
+          for constraint in table ['constraints']:
+            for field in constraint ['fields']:
+              if not field in existingFields:
+                keep.append (constraint)
+                break
+
+          table ['constraints'] = keep
+
+      else:
+        method = self.schemaCreator.createTable
+
+      if table.has_key ('constraints'):
+        constraintSet [table ['name']] = {'name':        table ['name'],
+                                          'constraints': table ['constraints']}
+        del table ['constraints']
+
+      # before we execute the planned action, check whether there's still work
+      # to be done
+      perform = table.has_key ('fields') and len (table ['fields'])
+      perform = perform or table.has_key ('primarykey')
+      perform = perform or (table.has_key ('indices') and \
+                            len (table ['indices']))
+
+      if perform:
+        self.schemaCreator.mergeTuple (result, method (table, codeOnly))
+
+    # on the second run we process all constraints
+    for table in constraintSet.values ():
+      self.schemaCreator.mergeTuple (result,
+                              self.schemaCreator.createTable (table, codeOnly))
+
+    return result


Property changes on: trunk/gnue-common/src/datasources/drivers/Base/Connection.py
___________________________________________________________________
Name: svn:keywords
   + +Id

Modified: trunk/gnue-common/src/datasources/drivers/Base/DataObject.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/Base/DataObject.py        
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/Base/DataObject.py        
2004-06-21 18:11:52 UTC (rev 5920)
@@ -25,6 +25,7 @@
 #
 # NOTES:
 #
+# $Id$
 
 __all__ = ['DataObject']
 


Property changes on: trunk/gnue-common/src/datasources/drivers/Base/DataObject.py
___________________________________________________________________
Name: svn:keywords
   + +Id

Modified: trunk/gnue-common/src/datasources/drivers/Base/RecordSet.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/Base/RecordSet.py 2004-06-18 
22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/Base/RecordSet.py 2004-06-21 
18:11:52 UTC (rev 5920)
@@ -25,6 +25,7 @@
 #
 # NOTES:
 #
+# $Id$
 
 __all__ = ['RecordSet']
 


Property changes on: trunk/gnue-common/src/datasources/drivers/Base/RecordSet.py
___________________________________________________________________
Name: svn:keywords
   + +Id

Modified: trunk/gnue-common/src/datasources/drivers/Base/ResultSet.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/Base/ResultSet.py 2004-06-18 
22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/Base/ResultSet.py 2004-06-21 
18:11:52 UTC (rev 5920)
@@ -25,6 +25,7 @@
 #
 # NOTES:
 #
+# $Id$
 
 __all__ = ['ResultSet']
 


Property changes on: trunk/gnue-common/src/datasources/drivers/Base/ResultSet.py
___________________________________________________________________
Name: svn:keywords
   + +Id


Property changes on: trunk/gnue-common/src/datasources/drivers/Base/Schema
___________________________________________________________________
Name: svn:ignore
   + *.pyc



Property changes on: trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation
___________________________________________________________________
Name: svn:ignore
   + *.pyc


Added: trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation/Creation.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation/Creation.py  
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation/Creation.py  
2004-06-21 18:11:52 UTC (rev 5920)
@@ -0,0 +1,595 @@
+#
+# This file is part of GNU Enterprise.
+#
+# GNU Enterprise is free software; you can redistribute it
+# and/or modify it under the terms of the GNU General Public
+# License as published by the Free Software Foundation; either
+# version 2, or (at your option) any later version.
+#
+# GNU Enterprise is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied
+# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public
+# License along with program; see the file COPYING. If not,
+# write to the Free Software Foundation, Inc., 59 Temple Place
+# - Suite 330, Boston, MA 02111-1307, USA.
+#
+# Copyright 2001-2004 Free Software Foundation
+#
+# $Id$
+
+import gnue
+
+# =============================================================================
+# Exceptions
+# =============================================================================
+
+class Error (gException):
+  pass
+
+class DefinitionError (Error):
+  pass
+
+class MissingKeyError (DefinitionError):
+  MSG = u_("The definition has no attribute '%s'")
+  def __init__ (self, attribute):
+    DefinitionError.__init__ (self, self.MSG % attribute)
+
+class TableDefinitionError (MissingKeyError):
+  MSG = u_("The table definition has no attribute '%s'")
+
+class FieldDefinitionError (MissingKeyError):
+  MSG = u_("The field definition has no attribute '%s'")
+
+class PrimaryKeyDefinitionError (MissingKeyError):
+  MSG = u_("Primarykey definition has no attribute '%s'")
+
+class PrimaryKeyFieldsError (Error):
+  def __init__ (self, table, name):
+    msg = u_("Primarykey '%(name)s' of table '%(table)s' has no fields") \
+          % {'name' : name,
+             'table': table}
+    Error.__init__ (self, msg)
+
+class PrimaryKeyError (DefinitionError):
+  def __init__ (self, table):
+    msg = u_("Table '%s' has a primary key which is not allowed on "
+             "table modification") % table
+    DefinitionError.__init__ (self, msg)
+
+class IndexDefinitionError (MissingKeyError):
+  MSG = u_("Index definition has no attribute '%s'")
+
+class IndexFieldsError (Error):
+  def __init__ (self, table, name):
+    msg = u_("Index '%(name)s' of table '%(table)s' has no fields") \
+          % {'name' : name,
+             'table': table}
+    Error.__init__ (self, msg)
+
+class ConstraintDefinitionError (MissingKeyError):
+  MSG = u_("Constraint definition has no attribute '%s'")
+
+class ConstraintFieldsError (Error):
+  def __init__ (self, table, name, fields):
+    msg = u_("Constraint '%(name)s' of table '%(table)s' has no '%(fields)s'")\
+          % {'name'  : name,
+             'table' : table,
+             'fields': fields}
+    Error.__init__ (self, msg)
+
+class ConstraintTypeError (Error):
+  def __init__ (self, table, name, cType):
+    msg = u_("Type '%(type)s' of constraint '%(name)s' in table '%(table)s' "
+             "not supported") % \
+            {'table': table,
+             'name' : name,
+             'type' : cType}
+    Error.__init__ (self, msg)
+
+class MissingTypeTransformationError (Error):
+  def __init__ (self, typename):
+    msg = u_("No type transformation for '%s' found") % typename
+    Error.__init__ (self, msg)
+
+
+class LengthError (Error):
+  def __init__ (self, identifier, maxlen):
+    msg = u_("The identifier '%(identifier)s' exceeds the maximum length "
+             "of %(maxlength)d characters") \
+          % {'identifier': identifier,
+             'maxlength': maxlen or 0}
+    Error.__init__ (self, msg)
+
+
+# =============================================================================
+# Base class for drivers schema creation support
+# =============================================================================
+class Creation:
+
+  MAX_NAME_LENGTH = None        # Max. length of an identifier
+  END_COMMAND     = ""          # Character used for command termination
+  
+  # ---------------------------------------------------------------------------
+  # Constructor
+  # ---------------------------------------------------------------------------
+
+  def __init__ (self, connection = None, introspector = None):
+    self.connection   = connection
+    self.introspector = introspector
+
+    if connection is not None and introspector is None:
+      self.introspector = connection.introspector
+
+
+  # ---------------------------------------------------------------------------
+  # Create a table from a table definition
+  # ---------------------------------------------------------------------------
+
+  def createTable (self, tableDefinition, codeOnly = False):
+    """
+    This function creates a table using the given definition and returns a
+    code-tuple, which can be used to do this.
+
+    @param tableDefinition: a dictionary of the table to be created 
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    self._validateTable (tableDefinition)
+    return ([], [], [])
+  
+
+  # ---------------------------------------------------------------------------
+  # Create a primary key
+  # ---------------------------------------------------------------------------
+
+  def createPrimaryKey (self, tableName, keyDefinition, codeOnly = False):
+    """
+    This function creates a primary key for the given table using the primary
+    key definition.
+
+    @param tableName: name of the table for which a key should be created
+    @param keyDefinition: a dictionary of the primary key to be created 
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    self._validatePrimaryKey (tableName, keyDefinition)
+    return ([], [], [])
+
+
+  # ---------------------------------------------------------------------------
+  # Create an index
+  # ---------------------------------------------------------------------------
+
+  def createIndex (self, tableName, indexDefinition, codeOnly = False):
+    """
+    This function creates an index for the given table using the index
+    definition.
+
+    @param tableName: name of the table for which an index should be created
+    @param indexDefinition: a dictionary of the index to be created 
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    self._validateIndex (tableName, indexDefinition)
+    return ([], [], [])
+
+
+  # ---------------------------------------------------------------------------
+  # Create a constraint
+  # ---------------------------------------------------------------------------
+
+  def createConstraint (self, tableName, constraintDef, codeOnly = False):
+    """
+    This function creates a constraint for the given table using the constraint
+    definition.
+
+    @param tableName: name of the table for which an index should be created
+    @param constraintDef: a dictionary of the constraint to be created 
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    self._validateConstraint (tableName, constraintDef)
+    return ([], [], [])
+
+
+  # ---------------------------------------------------------------------------
+  # Modify a table
+  # ---------------------------------------------------------------------------
+
+  def modifyTable (self, tableDefinition, codeOnly = False):
+    """
+    This function modifies a table according to the given definition.
+
+    @param tableDefinition: a dictionary of the table to be modified
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    self._validateTable (tableDefinition, True)
+    return ([], [], [])
+
+
+  # ---------------------------------------------------------------------------
+  # Create fields for a table
+  # ---------------------------------------------------------------------------
+
+  def createFields (self, tableName, fields, forAlter = False):
+    """
+    This function creates all listed fields in the given table. If forAlter is
+    TRUE this function should create the fields for a table modification.
+
+    @param tableName: name of the table for which fields should be created or
+        modified.
+    @param fields: a list of field definition dictionaries, describing the
+        fields to be created or modified.
+    @param forAlter: if TRUE the fields should be modified, otherwise created
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    for field in fields:
+      self._validateField (tableName, field)
+    return ([], [], [])
+
+
+  # ---------------------------------------------------------------------------
+  # Check whether an element exists or not
+  # ---------------------------------------------------------------------------
+
+  def exists (self, elementName, elementType = None):
+    """
+    This function examines whether an element exists in a datamodel or not.
+    It does this using the given introspector. If no introspector is
+    available, the result is FALSE.
+
+    @param elementName: name of the element to be examined
+    @param elementType: type of the element to be examined (optional)
+    @return: TRUE if the element was found, otherwise FALSE
+    """
+    if self.introspector is not None:
+      return self.introspector.find (name = elementName, type = elementType)
+    else:
+      return False
+
+
+  # ---------------------------------------------------------------------------
+  # Make a string safe for output
+  # ---------------------------------------------------------------------------
+
+  def escapeString (self, aString):
+    """
+    This function makes a string safe for output.
+    @param aString: string which should be made safe.
+    @return: escaped string
+    """
+    return aString
+
+
+  # ---------------------------------------------------------------------------
+  # Quote and escape a string
+  # ---------------------------------------------------------------------------
+
+  def quoteString (self, aString):
+    """
+    This function escapes the given string and quotes it. @see: escapeString
+    @param aString: string to be quoted
+    @return: quoted and escaped string
+    """
+    return "%s%s%s" % (self.QUOTECHAR, self.escapeString (aString),
+                       self.QUOTECHAR)
+
+
+  # ---------------------------------------------------------------------------
+  # Validate a given table definition
+  # ---------------------------------------------------------------------------
+
+  def validate (self, tableDef):
+    """
+    This function validates all parts of a table definition.
+    @param tableDef: dictionary describing the table and its parts.
+    """
+    self._validateTable (tableDef)
+    tableName = tableDef['name']
+
+    if tableDef.has_key ('primarykey'):
+      self._validatePrimaryKey (tableName, tableDef ['primarykey'])
+
+    if tableDef.has_key ('fields'):
+      for field in tableDef ['fields']:
+        self._validateField (tableName, field)
+
+    if tableDef.has_key ('indices'):
+      for index in tableDef ['indices']:
+        self._validateIndex (tableName, index)
+
+    if tableDef.has_key ('constraints'):
+      for constraint in tableDef ['constraints']:
+        self._validateConstraint (tableName, constraint)
+
+
+
+  # ---------------------------------------------------------------------------
+  # Call the appropriate method for a type-transformation
+  # ---------------------------------------------------------------------------
+
+  def _translateType (self, fieldDefinition):
+    """
+    This function calls the appropriate method for a type-conversion according
+    to the field definition's datatype and returns this method's result.
+
+    @param fieldDefinition: dictionary describing the field.
+    @return: a string with the native data type for the field definition.
+    """
+    if not fieldDefinition.has_key ('type'):
+      raise FieldDefinitionError, ('type')
+
+    aMethod = self.__findMethod (self.__class__, fieldDefinition ['type'])
+    if aMethod is None:
+      raise MissingTypeTransformationError, (fieldDefinition ['type'])
+
+    return aMethod (self, fieldDefinition)
+
+
+  # ---------------------------------------------------------------------------
+  # Create code for a single field
+  # ---------------------------------------------------------------------------
+
+  def _processField (self, tableName, fieldDef, forAlter = False):
+    """
+    This function creates a portion of code which defines the given field in
+    the table tableName. 
+    
+    @param tableName: the table this field belongs to.
+    @param fieldDef: the dictionary describing the field.
+    @param forAlter: If TRUE this function produces code for a table
+        modification, otherwise for a table creation.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    return ([], [], [])
+
+
+  # ---------------------------------------------------------------------------
+  # Create a usable name for a sequence-like object
+  # ---------------------------------------------------------------------------
+
+  def _getSequenceName (self, tableName, fieldDefinition):
+    """
+    This function creates a name for a sequence-like object using the table-
+    and fieldname. It respects a given restriction of identifier length.
+
+    @param tableName: name of the table
+    @param fieldDefinition: dictionary describing the field
+    @return: string with a name for the given sequence
+    """
+
+    res = "%s_%s_seq" % (tableName, fieldDefinition ['name'])
+    if self._nameTooLong (res):
+      res = "%s_%s_seq" % (tableName, id (fieldDefinition))
+
+    if self._nameTooLong (res):
+      res = "%s_seq" % (id (fieldDefinition))
+
+    return self._shortenName (res)
+
+
+  # ---------------------------------------------------------------------------
+  # Check if an identifier is too long
+  # ---------------------------------------------------------------------------
+
+  def _nameTooLong (self, aName):
+    """
+    This function returns TRUE if @aName exceeds MAX_NAME_LENGTH, otherwise
+    FALSE. 
+    """
+    return (self.MAX_NAME_LENGTH is not None) and \
+           (len (aName) > self.MAX_NAME_LENGTH)
+
+
+  # ---------------------------------------------------------------------------
+  # Make sure a given identifier doesn't exceed maximum length
+  # ---------------------------------------------------------------------------
+
+  def _shortenName (self, aName):
+    """
+    This function makes sure the given name doesn't exceed the maximum
+    identifier length.
+    @param aName: identifier to be checked
+    @return: identifier with extra characters cut off
+    """
+    if self._nameTooLong (aName):
+      return aName [:self.MAX_NAME_LENGTH]
+    else:
+      return aName
+
+
+  # ---------------------------------------------------------------------------
+  # Merge all sequences in the given tuples 
+  # ---------------------------------------------------------------------------
+
+  def mergeTuple (self, mergeInto, mergeFrom):
+    """
+    This function merges the sequences in the given tuples and returns the
+    first one (which is changed as a side effect too).
+    @param mergeInto: tuple with sequences which gets extended
+    @param mergeFrom: tuple with sequences which mergeInto gets extended with
+    @return: tuple of the same length as mergeInto with all sequences merged
+        together.
+    """
+    for ix in range (len (mergeInto)):
+      mergeInto [ix].extend (mergeFrom [ix])
+    return mergeInto
+
+
+  # ---------------------------------------------------------------------------
+  # Validate a table definition
+  # ---------------------------------------------------------------------------
+
+  def _validateTable (self, tableDef, forAlter = False):
+    """
+    This function validates a table definition.
+    @param tableDef: dictionary describing the table
+
+    @raise TableDefinitionError: If tableDef has no key 'name'
+    """
+    self.__validateDefinition (tableDef, ['name'], TableDefinitionError)
+    if self._nameTooLong (tableDef ['name']):
+      raise LengthError, (tableDef ['name'], self.MAX_NAME_LENGTH)
+
+    if forAlter and tableDef.has_key ('primarykey'):
+      raise PrimaryKeyError, (tableDef ['name'])
+
+
+  # ---------------------------------------------------------------------------
+  # Validate a given primary key definition
+  # ---------------------------------------------------------------------------
+
+  def _validatePrimaryKey (self, tableName, keyDefinition):
+    """
+    This function validates a primarykey definition.
+    @param tableName: name of the table the primary key belongs to
+    @param keyDefinition: dictionary describing the primary key
+
+    @raise PrimaryKeyDefinitionError: if 'name' or 'fields' are missing in the
+        definition.
+    @raise PrimaryKeyFieldsError: if 'fields' is an empty sequence
+    """
+    self.__validateDefinition (keyDefinition, ['name', 'fields'],
+                               PrimaryKeyDefinitionError)
+
+    if not len (keyDefinition ['fields']):
+      raise PrimaryKeyFieldsError, (tableName, keyDefinition ['name'])
+
+    for field in keyDefinition ['fields']:
+      if self._nameTooLong (field):
+        raise LengthError, (field, self.MAX_NAME_LENGTH)
+
+
+  # ---------------------------------------------------------------------------
+  # Validate a given index definition
+  # ---------------------------------------------------------------------------
+
+  def _validateIndex (self, tableName, indexDefinition):
+    """
+    This function validates an index definition.
+    @param tableName: name of the table
+    @param indexDefinition: dictionary describing the index
+
+    @raise IndexDefinitionError: if 'name' or 'fields' are missing in the
+        definition.
+    @raise IndexFieldsError: if 'fields' is an empty sequence
+    """
+    self.__validateDefinition (indexDefinition, ['name', 'fields'],
+                               IndexDefinitionError)
+    if not len (indexDefinition ['fields']):
+      raise IndexFieldsError, (tableName, indexDefinition ['name'])
+
+    for field in indexDefinition ['fields']:
+      if self._nameTooLong (field):
+        raise LengthError, (field, self.MAX_NAME_LENGTH)
+
+
+  # ---------------------------------------------------------------------------
+  # Validate a given constraint definition
+  # ---------------------------------------------------------------------------
+
+  def _validateConstraint (self, tableName, constDef):
+    """
+    This function validates a constraint definition.
+    @param tableName: name of the table the constraint belongs to
+    @param constDef: the dictionary describing the constraint
+
+    @raise ConstraintDefinitionError: if 'name' or 'fields' are missing in the
+        definition.
+    @raise ConstraintFieldsError: if 'fields' or 'reffields' is an empty
+        sequence.
+    """
+    self.__validateDefinition (constDef,
+        ['name', 'fields', 'reftable', 'reffields'], ConstraintDefinitionError)
+
+    if not len (constDef ['fields']):
+      raise ConstraintFieldsError, (tableName, constDef ['name'], 'fields')
+    if not len (constDef ['reffields']):
+      raise ConstraintFieldsError, (tableName, constDef ['name'], 'reffields')
+
+    if constDef.has_key ('type') and constDef ['type'] != 'foreignkey':
+      raise ConstraintTypeError, (tableName, constDef ['name'],
+                                  constDef ['type'])
+
+    if self._nameTooLong (constDef ['reftable']):
+      raise LengthError, (constDef ['reftable'], self.MAX_NAME_LENGTH)
+
+    for field in constDef ['fields'] + constDef ['reffields']:
+      if self._nameTooLong (field):
+        raise LengthError, (field, self.MAX_NAME_LENGTH)
+
+
+  # ---------------------------------------------------------------------------
+  # Validate a field definition
+  # ---------------------------------------------------------------------------
+
+  def _validateField (self, tableName, fieldDef):
+    """
+    This function validates a field definition.
+    @param tableName: name of the table
+    @param fieldDef: dictionary describing the field
+
+    @raise FieldDefinitionError: If the dictionary has no 'name' and 'type'
+        keys.
+    """
+    self.__validateDefinition (fieldDef, ['name', 'type'], FieldDefinitionError)
+    if self._nameTooLong (fieldDef ['name']):
+      raise LengthError, (fieldDef ['name'], self.MAX_NAME_LENGTH)
+
+
+  # ---------------------------------------------------------------------------
+  # Validate all keys in an arbitrary definition
+  # ---------------------------------------------------------------------------
+
+  def __validateDefinition (self, definition, keys, defError):
+    """
+    This function raises an exception if a key in the given sequence is missing
+    in the definition.
+    @param definition: dictionary to be checked
+    @param keys: sequence of keys which must exist in definition
+    @param defError: DefinitionError class raised on a missing key
+    """
+    for key in keys:
+      if not definition.has_key (key):
+        raise defError, (key)
+
+
+  # ---------------------------------------------------------------------------
+  # Find a method in a class or its superclasses
+  # ---------------------------------------------------------------------------
+
+  def __findMethod (self, aClass, aMethod):
+    """
+    This function looks for a method in a class and all its superclasses.
+
+    @param aClass: the class where the search starts
+    @param aMethod: name of the method to be looked for
+    @return: function pointer to the method found or None if search failed.
+    """
+
+    if aClass.__dict__.has_key (aMethod):
+      return aClass.__dict__ [aMethod]
+    else:
+      for base in aClass.__bases__:
+        result = self.__findMethod (base, aMethod)
+        if result is not None:
+          return result
+
+    return None
+


Property changes on: trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation/Creation.py
___________________________________________________________________
Name: svn:keywords
   + Id

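Everything in the file above is built around a three-part code tuple: each
operation returns (prologue, body, epilogue) sequences, mergeTuple folds them
together, and the parts are later executed in that order. A plain-Python
sketch of that convention (no GNUe imports, illustrative SQL only):

  # Plain-Python sketch of the code-tuple convention used by Creation.
  def merge_tuple (merge_into, merge_from):
    # extend each sequence of merge_into in place, like Creation.mergeTuple
    for ix in range (len (merge_into)):
      merge_into [ix].extend (merge_from [ix])
    return merge_into

  result = ([], [], [])
  merge_tuple (result, (["CREATE SEQUENCE foo_id_seq;"],
                        ["CREATE TABLE foo (id integer);"],
                        []))
  merge_tuple (result, ([],
                        ["CREATE UNIQUE INDEX foo_idx ON foo (id);"],
                        ["ALTER TABLE foo ADD PRIMARY KEY (id);"]))

  # prologue first, then body, then epilogue
  for block in result:
    for statement in block:
      print statement
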
Added: trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation/__init__.py
===================================================================


Property changes on: trunk/gnue-common/src/datasources/drivers/Base/Schema/Creation/__init__.py
___________________________________________________________________
Name: svn:keywords
   + Id

Added: trunk/gnue-common/src/datasources/drivers/Base/Schema/__init__.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/Base/Schema/__init__.py   
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/Base/Schema/__init__.py   
2004-06-21 18:11:52 UTC (rev 5920)
@@ -0,0 +1,2 @@
+# Indicate that this is no plugin
+__noplugin__ = True

Modified: trunk/gnue-common/src/datasources/drivers/Base/__init__.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/Base/__init__.py  2004-06-18 
22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/Base/__init__.py  2004-06-21 
18:11:52 UTC (rev 5920)
@@ -25,6 +25,7 @@
 #
 # NOTES:
 #
+# $Id$
 
 # Indicate that this is no plugin
 __noplugin__ = True


Property changes on: trunk/gnue-common/src/datasources/drivers/Base/__init__.py
___________________________________________________________________
Name: svn:keywords
   + +Id

Modified: trunk/gnue-common/src/datasources/drivers/DBSIG2/Connection.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/DBSIG2/Connection.py      
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/DBSIG2/Connection.py      
2004-06-21 18:11:52 UTC (rev 5920)
@@ -156,6 +156,9 @@
       # mx.DateTime
       return self._driver.Timestamp (value.year, value.month, value.day,
                                      value.hour, value.minute, value.second)
+    elif isinstance (value, mx.DateTime.DateTimeDeltaType):
+      return self._driver.Timestamp (1, 1, 1,
+                                     value.hour, value.minute, value.second)
     else:
       # Strings, Integers
       return value


Property changes on: trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema
___________________________________________________________________
Name: svn:ignore
   + *.pyc



Property changes on: trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation
___________________________________________________________________
Name: svn:ignore
   + *.pyc


Added: trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation/Creation.py
===================================================================
--- 
trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation/Creation.py    
    2004-06-18 22:11:34 UTC (rev 5919)
+++ 
trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation/Creation.py    
    2004-06-21 18:11:52 UTC (rev 5920)
@@ -0,0 +1,541 @@
+#
+# This file is part of GNU Enterprise.
+#
+# GNU Enterprise is free software; you can redistribute it
+# and/or modify it under the terms of the GNU General Public
+# License as published by the Free Software Foundation; either
+# version 2, or (at your option) any later version.
+#
+# GNU Enterprise is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied
+# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public
+# License along with program; see the file COPYING. If not,
+# write to the Free Software Foundation, Inc., 59 Temple Place
+# - Suite 330, Boston, MA 02111-1307, USA.
+#
+# Copyright 2001-2004 Free Software Foundation
+#
+# $Id$
+
+import string
+from gnue.common.datasources.drivers.Base.Schema.Creation import \
+    Creation as Base
+
+# =============================================================================
+# Exceptions
+# =============================================================================
+
+class NumericTransformationError (Base.Error):
+  def __init__ (self, length, scale):
+    msg = u_("No numeric transformation for number (%(length)d,%(scale)d)") \
+          % {'length': length, 'scale': scale}
+    Base.Error.__init__ (self, msg)
+
+# =============================================================================
+# This class implements the base creation for SQL-like drivers
+# =============================================================================
+
+class Creation (Base.Creation):
+
+  ALTER_MULTIPLE   = True
+  EXTRA_PRIMARYKEY = False
+  END_COMMAND      = ";"
+
+  # ---------------------------------------------------------------------------
+  # Create a table
+  # ---------------------------------------------------------------------------
+
+  def createTable (self, tableDefinition, codeOnly = False):
+    """
+    This function creates a table using the given definition and returns a
+    code-tuple, which can be used to do this.
+
+    @param tableDefinition: a dictionary of the table to be created 
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+
+    res = Base.Creation.createTable (self, tableDefinition, codeOnly)
+    body = res [1]
+
+    tableName = tableDefinition ['name']
+    
+    if tableDefinition.has_key ('fields'):
+      fields = tableDefinition ['fields']
+
+      fCode = self.createFields (tableName, fields, False)
+
+      if tableDefinition.has_key ('primarykey'):
+        if self.EXTRA_PRIMARYKEY:
+          self.mergeTuple (res, self.createPrimaryKey (tableName,
+                                        tableDefinition ['primarykey'], True))
+        else:
+          self.mergeTuple (fCode, self.createPrimaryKey (tableName,
+                                        tableDefinition ['primarykey'], True))
+
+      code = u"CREATE TABLE %s (%s)%s" % \
+              (tableName, string.join (fCode [1], ", "), self.END_COMMAND)
+      self.mergeTuple (res, (fCode [0], [code], fCode [2]))
+
+    # Create all requested indices
+    if tableDefinition.has_key ('indices'):
+      for ixDef in tableDefinition ['indices']:
+        self.mergeTuple (res, self.createIndex (tableName, ixDef, True))
+
+    # Add all constraints
+    if tableDefinition.has_key ('constraints'):
+      for constraintDef in  tableDefinition ['constraints']:
+        self.mergeTuple (res, \
+           self.createConstraint (tableName, constraintDef, True))
+
+    if not codeOnly:
+      self._executeCodeTuple (res)
+
+    return res
+
+
+  # ---------------------------------------------------------------------------
+  # Modify a table
+  # ---------------------------------------------------------------------------
+
+  def modifyTable (self, tableDefinition, codeOnly = False):
+    """
+    This function modifies a table according to the given definition.
+
+    @param tableDefinition: a dictionary of the table to be modified
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+
+    res  = Base.Creation.modifyTable (self, tableDefinition, codeOnly)
+    body = res [1]
+
+    table = tableDefinition ['name']
+
+    if tableDefinition.has_key ('fields'):
+      if self.ALTER_MULTIPLE:
+        item = tableDefinition ['fields']
+        fCode = self.createFields (table, item, True)
+        code = u"ALTER TABLE %s ADD (%s)%s" % \
+                  (table, string.join (fCode [1], ", "), self.END_COMMAND)
+        self.mergeTuple (res, (fCode [0], [code], fCode [2]))
+
+      else:
+        fields = [[fDef] for fDef in tableDefinition ['fields']]
+        for item in fields:
+          fCode = self.createFields (table, item, True)
+          code = u"ALTER TABLE %s ADD %s%s" % \
+              (table, string.join (fCode [1], ", "), self.END_COMMAND)
+          self.mergeTuple (res, (fCode [0], [code], fCode [2]))
+
+    # Create all requested indices
+    if tableDefinition.has_key ('indices'):
+      for ixDef in tableDefinition ['indices']:
+        self.mergeTuple (res, self.createIndex (table, ixDef, True))
+
+    # Add all constraints
+    if tableDefinition.has_key ('constraints'):
+      for constraintDef in  tableDefinition ['constraints']:
+        self.mergeTuple (res, \
+           self.createConstraint (table, constraintDef, True))
+
+    if not codeOnly:
+      self._executeCodeTuple (res)
+
+    return res
+
+
+  # ---------------------------------------------------------------------------
+  # Create or modify fields for a table
+  # ---------------------------------------------------------------------------
+
+  def createFields (self, tableName, fields, forAlter = False):
+    """
+    This function calls _processField for each listed field and merges this
+    result into its own code-tuple.
+
+    @param tableName: name of the table for which fields should be created or
+        modified.
+    @param fields: a list of field definition dictionaries, describing the
+        fields to be created or modified.
+    @param forAlter: if TRUE the fields should be modified, otherwise created
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    res = Base.Creation.createFields (self, tableName, fields, forAlter)
+    for field in fields:
+      self.mergeTuple (res, self._processField (tableName, field, forAlter))
+    return res
+
+
+  # ---------------------------------------------------------------------------
+  # Create a primary key
+  # ---------------------------------------------------------------------------
+
+  def createPrimaryKey (self, tableName, keyDefinition, codeOnly = False):
+    """
+    This function creates a primary key for the given table using the primary
+    key definition. If the constant EXTRA_PRIMARYKEY is true, an 'alter
+    table'-statement will be created, otherwise the primary key constraint will
+    be added to the table creation/modification statement.
+
+    @param tableName: name of the table for which a key should be created
+    @param keyDefinition: a dictionary of the primary key to be created 
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    res = Base.Creation.createPrimaryKey (self, tableName, keyDefinition,
+                                          codeOnly)
+    fields  = string.join (keyDefinition ['fields'], ", ")
+    keyName = self._shortenName (keyDefinition ['name'])
+    code = u"CONSTRAINT %s PRIMARY KEY (%s)" % (keyName, fields)
+    if self.EXTRA_PRIMARYKEY:
+      res [2].append (u"ALTER TABLE %s ADD %s" % (tableName, code))
+    else:
+      res [1].append (code)
+
+    return res
+
+
+  # ---------------------------------------------------------------------------
+  # Create a new index for a table
+  # ---------------------------------------------------------------------------
+
+  def createIndex (self, tableName, indexDefinition, codeOnly = False):
+    """
+    This function creates an index for the given table using the index
+    definition.
+
+    @param tableName: name of the table for which an index should be created
+    @param indexDefinition: a dictionary of the index to be created 
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    res = Base.Creation.createIndex (self, tableName, indexDefinition,
+                                     codeOnly)
+    unique = indexDefinition.has_key ('unique') and indexDefinition ['unique']
+    indexName = self._shortenName (indexDefinition ['name'])
+
+    body = res [1]
+    body.append (u"CREATE %sINDEX %s ON %s (%s)%s" % \
+        (unique and "UNIQUE " or "", indexName, tableName,
+         string.join (indexDefinition ['fields'], ", "), self.END_COMMAND))
+
+    if not codeOnly:
+      self._executeCodeTuple (res)
+
+    return res
+
+
+  # ---------------------------------------------------------------------------
+  # Create a constraint
+  # ---------------------------------------------------------------------------
+
+  def createConstraint (self, tableName, constraintDef, codeOnly = False):
+    """
+    This function creates a constraint for the given table using the constraint
+    definition.
+
+    @param tableName: name of the table for which an index should be created
+    @param constraintDef: a dictionary of the constraint to be created 
+    @param codeOnly: if TRUE no operation takes place, but only the code will
+        be returned.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+    res = Base.Creation.createConstraint (self, tableName, constraintDef,
+                                          codeOnly)
+    cName     = self._shortenName (constraintDef ['name'])
+    fields    = constraintDef ['fields']
+    reftable  = constraintDef ['reftable']
+    reffields = constraintDef ['reffields']
+
+    body = res [1]
+    body.append (u"ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) "
+                  "REFERENCES %s (%s)%s" % \
+         (tableName, cName, string.join (fields, ", "), reftable,
+          string.join (reffields, ", "), self.END_COMMAND))
+
+    if not codeOnly:
+      self._executeCodeTuple (res)
+
+    return res
+
+
+  # ---------------------------------------------------------------------------
+  # Create code for a single field definition
+  # ---------------------------------------------------------------------------
+
+  def _processField (self, tableName, fieldDef, forAlter = False):
+    """
+    This function creates a portion of code which defines the given field in
+    the table tableName. 
+    
+    @param tableName: the table this field belongs to.
+    @param fieldDef: the dictionary describing the field.
+    @param forAlter: If TRUE this function produces code for a table
+        modification, otherwise for a table creation.
+    @return: a tuple of sequences (prologue, body, epilogue) containing the
+        code to perform the action.
+    """
+
+    res = Base.Creation._processField (self, tableName, fieldDef, forAlter)
+    body = res [1]
+
+    body.append (self._composeField (tableName, fieldDef, forAlter))
+
+    if fieldDef.has_key ('defaultwith'):
+      self._defaultwith (res, tableName, fieldDef, forAlter)
+
+    if fieldDef.has_key ('default') and fieldDef ['default']:
+      default = fieldDef ['default']
+      if default [:8].upper () != 'DEFAULT ':
+        default = "DEFAULT %s" % default
+      self._setColumnDefault (res, tableName, fieldDef, forAlter, default)
+
+    self._integrateNullable (res, tableName, fieldDef, forAlter)
+
+    return res
+
+
+  # ---------------------------------------------------------------------------
+  # Handle the nullable flag of a field 
+  # ---------------------------------------------------------------------------
+
+  def _integrateNullable (self, code, tableName, fieldDef, forAlter):
+    """
+    This function handles the nullable flag of a field. If the field is not
+    nullable the last line of the code's body sequence will be modified on a
+    create-action, or an 'alter table'-statement is added to the code's
+    epilogue. @see: _setColumnDefault ()
+
+    @param code: code-tuple which gets the result. If forAlter is FALSE this
+        function assumes the field's code is the last line in code.body
+    @param tableName: name of the table the field belongs to
+    @param fieldDef: dictionary describing the field
+    @param forAlter: if TRUE, the field definition is used in a table
+        modification, otherwise in a table creation.
+    """
+    if fieldDef.has_key ('nullable') and not fieldDef ['nullable']:
+      self._setColumnDefault (code, tableName, fieldDef, forAlter, "NOT NULL")
+
+
+  # ---------------------------------------------------------------------------
+  # Process a defaultwith attribute
+  # ---------------------------------------------------------------------------
+
+  def _defaultwith (self, code, tableName, fieldDef, forAlter):
+    """
+    This function could be overridden by any descendants to create code for
+    special defaults like 'serial' or 'timestamp'.
+
+    @param code: code-tuple to merge the result in
+    @param tableName: name of the table
+    @param fieldDef: dictionary describing the field with the default
+    @param forAlter: TRUE if the definition is used in a table modification
+    """
+    pass
+
+
+  # ---------------------------------------------------------------------------
+  # Set a default value for a given column
+  # ---------------------------------------------------------------------------
+
+  def _setColumnDefault (self, code, tableName, fieldDef, forAlter, default):
+    """
+    This function sets a default value for a given column. If it is called for
+    a table modification the epilogue of the code-block will be modified.
+    On a table creation, this function assumes the field's code is in the last
+    line of the code-block's body sequence.
+
+    @param code: code-tuple which gets the result. If forAlter is FALSE this
+        function assumes the field's code is the last line in code.body
+    @param tableName: name of the table the field belongs to
+    @param fieldDef: dictionary describing the field
+    @param forAlter: if TRUE, the field definition is used in a table
+        modification, otherwise in a table creation.
+    @param default: string with the default value for the column
+    """
+    if forAlter:
+      code [2].append (u"ALTER TABLE %s ALTER COLUMN %s SET %s%s" % \
+          (tableName, fieldDef ['name'], default, self.END_COMMAND))
+    else:
+      code [1][-1] += " %s" % default
+
+
+  # ---------------------------------------------------------------------------
+  # Compose a field from 'fieldname fieldtype'
+  # ---------------------------------------------------------------------------
+
+  def _composeField (self, tableName, fieldDefinition, forAlter):
+    """
+    This function composes a field definition of the form <fieldname>
+    <fieldtype> where the latter one has been translated using the
+    _translateType function.
+
+    @param tableName: name of the table the field belongs to
+    @param fieldDefinition: the dictionary describing the field
+    @return: string containing fieldname and fieldtype
+    """
+    res = "%s %s" % (fieldDefinition ['name'],
+                     self._translateType (fieldDefinition))
+    return res
+    
+
+
+  # ---------------------------------------------------------------------------
+  # Execute all parts of a code-tuple
+  # ---------------------------------------------------------------------------
+
+  def _executeCodeTuple (self, code):
+    """
+    This function executes the given code-tuple using the instances connection.
+    @param code: tuple of n code-sequences. Each element of every sequence is
+        treated as a single statement which gets executed via
+        connection.makecursor ()
+    """
+    if self.connection is not None:
+      for block in range (len (code)):
+        for statement in code [block]:
+          if len (statement):
+            cursor = self.connection.makecursor (statement)
+            cursor.close ()
+
+      self.connection.commit ()
+
+
+  # ---------------------------------------------------------------------------
+  # A string becomes either varchar or text 
+  # ---------------------------------------------------------------------------
+
+  def string (self, fieldDefinition):
+    """
+    This function creates a native datatype for a string field. If a length is
+    defined it results in a 'varchar'-field, otherwise in a 'text'-field
+
+    @param fieldDefinition: dictionary describing the field
+    @return: varchar (length) or text
+    """
+
+    if fieldDefinition.has_key ('length'):
+      return "varchar (%s)" % fieldDefinition ['length']
+    else:
+      return "text"
+
+
+  # ---------------------------------------------------------------------------
+  # Keep date as is
+  # ---------------------------------------------------------------------------
+
+  def date (self, fieldDefinition):
+    """
+    This function returns the native datatype for 'date'-fields
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'date'
+    """
+    return "date"
+
+
+  # ---------------------------------------------------------------------------
+  # Keep time as is
+  # ---------------------------------------------------------------------------
+
+  def time (self, fieldDefinition):
+    """
+    This function returns the native datatype for 'time'-fields
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'time'
+    """
+    return "time"
+
+
+  # ---------------------------------------------------------------------------
+  # Keep datetime as is
+  # ---------------------------------------------------------------------------
+
+  def datetime (self, fieldDefinition):
+    """
+    This function returns the native datatype for 'datetime'-fields
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'datetime'
+    """
+    return "datetime"
+
+
+
+# =============================================================================
+# Modules self test code
+# =============================================================================
+
+if __name__ == '__main__':
+  def dumpTuple (aTuple):
+    print "\nPrologue:"
+    print "---------"
+    for line in aTuple [0]:
+      print ">>  %s" % line
+    print "\nBody:"
+    print "------"
+    for line in aTuple [1]:
+      print ">>  %s" % line
+    print "\nEpilogue:"
+    print "---------"
+    for line in aTuple [2]:
+      print ">>  %s" % line
+
+  cr = Creation ()
+  print "Hey!"
+  fields = [{'name'    : 'gnue_id',
+             'type'    : 'string',
+             'length'  : 32,
+             'nullable': False},
+            {'name'    : 'address_code',
+             'type'    : 'string',
+             'length'  : 2,
+             'nullable': True},
+            {'name'    : 'fooserial',
+             'type'    : 'string',
+             'length'  : 6,
+             'nullable': False,
+             'defaultwith': 'serial',
+             'default': ''}]
+
+  tdef = {'name': 'address_country',
+          'fields': fields,
+          'primarykey': {
+            'name': 'pk_gnue_id_address_country',
+            'fields': ['gnue_id']},
+          'indices': [
+            {'name': 'code_index',
+             'unique': True,
+             'fields': ['address_code']},
+            {'name': 'silly_index',
+             'fields': ['address_code', 'gnue_id']}],
+          'constraints': [
+            {'name': 'fake_constraint',
+             'type': 'foreignkey',
+             'fields': ['address_code', 'fake'],
+             'reftable': 'foobar',
+             'reffields': ['gnue_id', 'trash']}
+          ]
+         }
+          
+  res = cr.createTable (tdef, True)
+  dumpTuple (res)
+
+  del tdef ['primarykey']
+  res = cr.modifyTable (tdef, True)
+  dumpTuple (res)


Property changes on: 
trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation/Creation.py
___________________________________________________________________
Name: svn:keywords
   + Id

Added: 
trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation/__init__.py
===================================================================


Property changes on: 
trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/Creation/__init__.py
___________________________________________________________________
Name: svn:keywords
   + Id

Added: trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/__init__.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/__init__.py 
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/DBSIG2/Schema/__init__.py 
2004-06-21 18:11:52 UTC (rev 5920)
@@ -0,0 +1,2 @@
+# Indicate that this is no plugin
+__noplugin__ = True

Added: 
trunk/gnue-common/src/datasources/drivers/interbase/Schema/Creation/Creation.py
===================================================================
--- 
trunk/gnue-common/src/datasources/drivers/interbase/Schema/Creation/Creation.py 
    2004-06-18 22:11:34 UTC (rev 5919)
+++ 
trunk/gnue-common/src/datasources/drivers/interbase/Schema/Creation/Creation.py 
    2004-06-21 18:11:52 UTC (rev 5920)
@@ -0,0 +1,157 @@
+#
+# This file is part of GNU Enterprise.
+#
+# GNU Enterprise is free software; you can redistribute it
+# and/or modify it under the terms of the GNU General Public
+# License as published by the Free Software Foundation; either
+# version 2, or (at your option) any later version.
+#
+# GNU Enterprise is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied
+# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public
+# License along with program; see the file COPYING. If not,
+# write to the Free Software Foundation, Inc., 59 Temple Place
+# - Suite 330, Boston, MA 02111-1307, USA.
+#
+# Copyright 2001-2004 Free Software Foundation
+#
+# $Id$
+
+from gnue.common.datasources.drivers.DBSIG2.Schema.Creation import \
+    Creation as Base
+
+
+# =============================================================================
+# Class implementing schema creation for Interbase (5.x/6.x), Firebird (1.x)
+# =============================================================================
+
+class Creation (Base.Creation):
+
+  MAX_NAME_LENGTH = 31
+  ALTER_MULTIPLE  = False
+
+  # ---------------------------------------------------------------------------
+  # Process a defaultwith attribute
+  # ---------------------------------------------------------------------------
+
+  def _defaultwith (self, code, tableName, fieldDef, forAlter):
+    if fieldDef ['defaultwith'] == 'serial':
+      seq = self._getSequenceName (tableName, fieldDef)
+      code [0].append (u"CREATE GENERATOR %s%s" % (seq, self.END_COMMAND))
+      fieldDef ['default'] = "nextval ('%s')" % seq
+      fName = fieldDef ['name']
+
+      code [2].append (u"SET TERM ^ ;")
+      code [2].append (u"CREATE TRIGGER trg_%s FOR %s ACTIVE "
+          "BEFORE INSERT POSITION 0 AS BEGIN "
+          "IF (NEW.%s IS NULL) THEN BEGIN NEW.%s = GEN_ID (%s,1); END"
+          "END ^" % (fName, tableName, fName, fName, seq))
+      code [2].append (u"SET TERM ; ^")
+
+    elif fieldDef ['defaultwith'] == 'timestamp':
+      fieldDef ['default'] = "'NOW'"
+    
+
+
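For a hypothetical serial field 'foo_id' on table 'foo', the branch above would
contribute roughly the following statements (the generator name is invented; the
real one comes from _getSequenceName, which is outside this hunk):

prologue = [u"CREATE GENERATOR foo_foo_id_seq;"]
epilogue = [u"SET TERM ^ ;",
            u"CREATE TRIGGER trg_foo_id FOR foo ACTIVE BEFORE INSERT "
            u"POSITION 0 AS BEGIN IF (NEW.foo_id IS NULL) THEN BEGIN "
            u"NEW.foo_id = GEN_ID (foo_foo_id_seq,1); END END ^",
            u"SET TERM ; ^"]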
+  # ---------------------------------------------------------------------------
+  # A key is an integer
+  # ---------------------------------------------------------------------------
+
+  def key (self, fieldDefinition):
+    """
+    Native datatype for a 'key'-field is 'integer'
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'integer'
+    """
+    return "integer"
+
+
+  # ---------------------------------------------------------------------------
+  # Create a native type representation for strings
+  # ---------------------------------------------------------------------------
+
+  def string (self, fieldDefinition):
+    """
+    This function returns an appropriate type for strings according to their
+    length.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: string with the native datatype
+    """
+
+    if fieldDefinition.has_key ('length') and \
+       fieldDefinition ['length'] <= 32767:
+      return "varchar (%s)" % fieldDefinition ['length']
+
+    elif not fieldDefinition.has_key ('length'):
+      return "varchar (32767)"
+    else:
+      return "blob"
+
+
+  # ---------------------------------------------------------------------------
+  # Create an appropriate type for a number
+  # ---------------------------------------------------------------------------
+
+  def number (self, fieldDefinition):
+    """
+    This function returns an appropriate type for a number according to the
+    given length and precision.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: string with the native datatype
+    """
+    scale  = 0
+    length = 0
+
+    if fieldDefinition.has_key ('precision'):
+      scale = fieldDefinition ['precision']
+    if fieldDefinition.has_key ('length'):
+      length = fieldDefinition ['length']
+
+    if scale == 0:
+      if length <= 4:
+        return "smallint"
+      elif length <= 9:
+        return "integer"
+      else:
+        return "numeric (%s,0)" % length
+    else:
+      return "numeric (%s,%s)" % (length, scale)
+
+
+  # ---------------------------------------------------------------------------
+  # Interbase has no native boolean type
+  # ---------------------------------------------------------------------------
+
+  def boolean (self, fieldDefinition):
+    """
+    This function returns a smallint, since Interbase/Firebird has no native
+    boolean type. Using a domain won't work for now, due to limitations in the
+    introspection. Even adding a CHECK-clause to the datatype doesn't work.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'smallint'
+    """
+    return "smallint"
+
+
+  # ---------------------------------------------------------------------------
+  # Native datatype for datetime
+  # ---------------------------------------------------------------------------
+
+  def datetime (self, fieldDefinition):
+    """
+    This function returns the native type for a datetime value, which is
+    'timestamp' for interbase.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'timestamp'
+    """
+    return "timestamp"
+


Property changes on: 
trunk/gnue-common/src/datasources/drivers/interbase/Schema/Creation/Creation.py
___________________________________________________________________
Name: svn:keywords
   + Id


Property changes on: 
trunk/gnue-common/src/datasources/drivers/interbase/Schema/Creation/__init__.py
___________________________________________________________________
Name: svn:keywords
   + Id

Modified: 
trunk/gnue-common/src/datasources/drivers/interbase/Schema/Discovery/Introspection.py
===================================================================
--- 
trunk/gnue-common/src/datasources/drivers/interbase/Schema/Discovery/Introspection.py
       2004-06-18 22:11:34 UTC (rev 5919)
+++ 
trunk/gnue-common/src/datasources/drivers/interbase/Schema/Discovery/Introspection.py
       2004-06-21 18:11:52 UTC (rev 5920)
@@ -101,7 +101,7 @@
 
     statement = "select rdb$relation_name, rdb$view_source "+\
                        "from rdb$relations " + \
-                       "where rdb$relation_name = '%s'" % (name)
+                       "where rdb$relation_name = '%s'" % (name.upper ())
 
     cursor = self._connection.native.cursor()
     cursor.execute(statement)
@@ -170,7 +170,7 @@
         attrs['datatype']='text'
 
       cursor.execute("select rdb$default_source from rdb$relation_fields"+ \
-                  " where rdb$relation_name = '%s' " % (parent.name)+ \
+                  " where rdb$relation_name = '%s' " % (parent.name.upper ())+ \
                   " and rdb$field_name = '%s'" % (upper(attrs['name'])))
       defrs = cursor.fetchone()
       try:

Modified: 
trunk/gnue-common/src/datasources/drivers/interbase/interbase/Connection.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/interbase/interbase/Connection.py 
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/interbase/interbase/Connection.py 
2004-06-21 18:11:52 UTC (rev 5920)
@@ -48,8 +48,9 @@
 from gnue.common.datasources.drivers import DBSIG2
 from DataObject import *
 from gnue.common.datasources.drivers.interbase.Schema.Discovery.Introspection import Introspection
+from gnue.common.datasources.drivers.interbase.Schema.Creation.Creation \
+    import Creation
 
-
 try:
   import kinterbasdb as SIG2api
 except ImportError:
@@ -66,6 +67,7 @@
   _boolean_false     = 0
   _numbers_as_string = False
   defaultBehavior    = Introspection
+  defaultCreator     = Creation
   _driver            = SIG2api
   _DatabaseError     = SIG2api.DatabaseError
   supportedDataObjects = {

Added: 
trunk/gnue-common/src/datasources/drivers/mysql/Schema/Creation/Creation.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/mysql/Schema/Creation/Creation.py 
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/mysql/Schema/Creation/Creation.py 
2004-06-21 18:11:52 UTC (rev 5920)
@@ -0,0 +1,164 @@
+#
+# This file is part of GNU Enterprise.
+#
+# GNU Enterprise is free software; you can redistribute it
+# and/or modify it under the terms of the GNU General Public
+# License as published by the Free Software Foundation; either
+# version 2, or (at your option) any later version.
+#
+# GNU Enterprise is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied
+# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public
+# License along with program; see the file COPYING. If not,
+# write to the Free Software Foundation, Inc., 59 Temple Place
+# - Suite 330, Boston, MA 02111-1307, USA.
+#
+# Copyright 2001-2004 Free Software Foundation
+#
+# $Id$
+
+from gnue.common.datasources.drivers.DBSIG2.Schema.Creation import \
+    Creation as Base
+
+
+# =============================================================================
+# Class implementing schema creation for MySQL (3.x/4.x)
+# =============================================================================
+
+class Creation (Base.Creation):
+
+  MAX_NAME_LENGTH = 64
+
+  # ---------------------------------------------------------------------------
+  # Handle special defaults
+  # ---------------------------------------------------------------------------
+
+  def _defaultwith (self, code, tableName, fieldDef, forAlter):
+    """
+    This function adds 'auto_increment' for 'serials' and checks for the
+    proper field type on 'timestamps'.
+
+    @param code: code-tuple to merge the result in
+    @param tableName: name of the table
+    @param fieldDef: dictionary describing the field with the default
+    @param forAlter: TRUE if the definition is used in a table modification
+    """
+    if fieldDef ['defaultwith'] == 'serial':
+      seq = self._getSequenceName (tableName, fieldDef)
+      code [1] [-1] += " AUTO_INCREMENT"
+      fieldDef ['default'] = "nextval ('%s')" % seq
+
+    elif fieldDef ['defaultwith'] == 'timestamp':
+      if fieldDef ['type'] != 'timestamp':
+        fieldDef ['type'] = 'timestamp'
+
+        code [1].pop ()
+        code [1].append (self._composeField (tableName, fieldDef, forAlter))
+        
+        print _("WARNING: changing column type of '%(table)s.%(column)s' "
+                "to 'timestamp'") \
+              % {'table': tableName,
+                 'column': fieldDef ['name']}
+
+
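A small sketch of what the serial branch does to the body sequence (the field
text is invented); the timestamp branch instead re-composes the last body line
with type 'timestamp' and prints the warning shown above:

body = [u"foo_id int unsigned"]   # last field line pushed by the caller
body [-1] += " AUTO_INCREMENT"
# body [-1] is now u"foo_id int unsigned AUTO_INCREMENT"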
+  # ---------------------------------------------------------------------------
+  # A key is an unsigned integer
+  # ---------------------------------------------------------------------------
+
+  def key (self, fieldDefinition):
+    """
+    Native datatype for a 'key'-field is 'unsigned integer'
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'int unsigned'
+    """
+    return "int unsigned"
+
+
+  # ---------------------------------------------------------------------------
+  # Translate a string into an appropriate native type
+  # ---------------------------------------------------------------------------
+
+  def string (self, fieldDefinition):
+    """
+    This function returns an appropriate native type for a string. If the
+    length is given and does not exceed 255 characters, the result is a
+    varchar, otherwise text.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: string with the native datatype
+    """
+    if fieldDefinition.has_key ('length') and fieldDefinition ['length'] <= 255:
+      return "varchar (%s)" % fieldDefinition ['length']
+    else:
+      return "text"
+
+
+  # ---------------------------------------------------------------------------
+  # Create an appropriate type for a number
+  # ---------------------------------------------------------------------------
+
+  def number (self, fieldDefinition):
+    """
+    This function returns an appropriate type for a number according to the
+    given length and precision.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: string with the native datatype
+    """
+    scale  = 0
+    length = 0
+
+    if fieldDefinition.has_key ('precision'):
+      scale = fieldDefinition ['precision']
+    if fieldDefinition.has_key ('length'):
+      length = fieldDefinition ['length']
+
+    if scale == 0:
+      if length <= 4:
+        return "smallint"
+      elif length <= 9:
+        return "int"
+      elif length <= 18:
+        return "bigint"
+      else:
+        return "decimal (%s,0)" % length
+    else:
+      return "decimal (%s,%s)" % (length, scale)
+
+
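A standalone sketch of the length/precision mapping above together with a few
sample translations (it mirrors the rules rather than importing the driver):

def mysql_number (length = 0, scale = 0):
  # same rules as number () above
  if scale == 0:
    if length <= 4:
      return "smallint"
    elif length <= 9:
      return "int"
    elif length <= 18:
      return "bigint"
    else:
      return "decimal (%s,0)" % length
  else:
    return "decimal (%s,%s)" % (length, scale)

assert mysql_number (4)     == "smallint"
assert mysql_number (9)     == "int"
assert mysql_number (18)    == "bigint"
assert mysql_number (19)    == "decimal (19,0)"
assert mysql_number (8, 2)  == "decimal (8,2)"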
+  # ---------------------------------------------------------------------------
+  # MySQL has no native boolean type
+  # ---------------------------------------------------------------------------
+
+  def boolean (self, fieldDefinition):
+    """
+    MySQL has no native boolean type, so this function returns 'tinyint (1)
+    unsigned' instead.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'tinyint (1) unsigned'
+    """
+    return "tinyint (1) unsigned"
+
+
+  # ---------------------------------------------------------------------------
+  # MySQL has a timestamp, which is needed for 'defaultwith timestamp'
+  # ---------------------------------------------------------------------------
+
+  def timestamp (self, fieldDefinition):
+    """
+    In MySQL, timestamps are used for default values; otherwise we map to the
+    inherited type transformation of 'timestamp'.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: string with the native datatype
+    """
+    if fieldDefinition.has_key ('defaultwith') and \
+        fieldDefinition ['defaultwith'] == 'timestamp':
+      return "timestamp"
+    else:
+      return Base.Creation.timestamp (self, fieldDefinition)
+


Property changes on: 
trunk/gnue-common/src/datasources/drivers/mysql/Schema/Creation/Creation.py
___________________________________________________________________
Name: svn:keywords
   + Id


Property changes on: 
trunk/gnue-common/src/datasources/drivers/mysql/Schema/Creation/__init__.py
___________________________________________________________________
Name: svn:keywords
   + Id

Modified: 
trunk/gnue-common/src/datasources/drivers/mysql/Schema/Discovery/Introspection.py
===================================================================
--- 
trunk/gnue-common/src/datasources/drivers/mysql/Schema/Discovery/Introspection.py
   2004-06-18 22:11:34 UTC (rev 5919)
+++ 
trunk/gnue-common/src/datasources/drivers/mysql/Schema/Discovery/Introspection.py
   2004-06-21 18:11:52 UTC (rev 5920)
@@ -89,18 +89,24 @@
     statement = "DESCRIBE %s" % (name)
 
     cursor = self._connection.native.cursor()
-    cursor.execute(statement)
+    try:
+      try:
+        cursor.execute(statement)
 
-    rs = cursor.fetchone()
-    if rs:
-      schema = GIntrospection.Schema(attrs={'id':name, 'name':name,
+        rs = cursor.fetchone()
+        if rs:
+          schema = GIntrospection.Schema(attrs={'id':name, 'name':name,
                            'type':'table',
                            'primarykey': self.__getPrimaryKey(name,cursor)},
                            getChildSchema=self.__getFieldSchema)
-    else:
-      schema = None
+        else:
+          schema = None
 
-    cursor.close()
+      except:
+        schema = None
+
+    finally:
+      cursor.close()
     return schema
 
 

Modified: trunk/gnue-common/src/datasources/drivers/mysql/mysql/Connection.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/mysql/mysql/Connection.py 
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/mysql/mysql/Connection.py 
2004-06-21 18:11:52 UTC (rev 5920)
@@ -49,6 +49,8 @@
 from gnue.common.datasources.drivers import DBSIG2
 from DataObject import *
 from gnue.common.datasources.drivers.mysql.Schema.Discovery.Introspection import Introspection
+from gnue.common.datasources.drivers.mysql.Schema.Creation.Creation import \
+    Creation
 
 
 ######################################################################
@@ -59,6 +61,7 @@
 
   _driver = MySQLdb
   defaultBehavior = Introspection
+  defaultCreator  = Creation
   _DatabaseError = MySQLdb.DatabaseError
   supportedDataObjects = {
     'object': DataObject_Object,

Modified: 
trunk/gnue-common/src/datasources/drivers/postgresql/Base/Connection.py
===================================================================
--- trunk/gnue-common/src/datasources/drivers/postgresql/Base/Connection.py     
2004-06-18 22:11:34 UTC (rev 5919)
+++ trunk/gnue-common/src/datasources/drivers/postgresql/Base/Connection.py     
2004-06-21 18:11:52 UTC (rev 5920)
@@ -27,6 +27,7 @@
 #
 # NOTES:
 #
+# $Id: $
 
 __all__ = ['Connection']
 
@@ -38,6 +39,7 @@
 from gnue.common.datasources.drivers import DBSIG2
 from DataObject import *
 from gnue.common.datasources.drivers.postgresql.Schema.Discovery.Introspection import Introspection
+from gnue.common.datasources.drivers.postgresql.Schema.Creation.Creation import Creation
 
 
 ######################################################################
@@ -47,6 +49,7 @@
 class Connection(DBSIG2.Connection):
 
   defaultBehavior = Introspection
+  defaultCreator  = Creation
   supportedDataObjects = {
     'object': DataObject_Object,
     'sql':    DataObject_SQL


Property changes on: 
trunk/gnue-common/src/datasources/drivers/postgresql/Base/Connection.py
___________________________________________________________________
Name: svn:keywords
   + Id

Added: 
trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Creation/Creation.py
===================================================================
--- 
trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Creation/Creation.py
    2004-06-18 22:11:34 UTC (rev 5919)
+++ 
trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Creation/Creation.py
    2004-06-21 18:11:52 UTC (rev 5920)
@@ -0,0 +1,135 @@
+#
+# This file is part of GNU Enterprise.
+#
+# GNU Enterprise is free software; you can redistribute it
+# and/or modify it under the terms of the GNU General Public
+# License as published by the Free Software Foundation; either
+# version 2, or (at your option) any later version.
+#
+# GNU Enterprise is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied
+# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public
+# License along with program; see the file COPYING. If not,
+# write to the Free Software Foundation, Inc., 59 Temple Place
+# - Suite 330, Boston, MA 02111-1307, USA.
+#
+# Copyright 2001-2004 Free Software Foundation
+#
+# $Id$
+
+from gnue.common.datasources.drivers.DBSIG2.Schema.Creation import \
+    Creation as Base
+
+
+# =============================================================================
+# Class implementing schema creation for PostgreSQL
+# =============================================================================
+
+class Creation (Base.Creation):
+
+  MAX_NAME_LENGTH = 31
+  ALTER_MULTIPLE  = False
+
+
+  # ---------------------------------------------------------------------------
+  # Handle special defaults
+  # ---------------------------------------------------------------------------
+
+  def _defaultwith (self, code, tableName, fieldDef, forAlter):
+    """
+    This function creates a sequence for 'serials' and sets the default for
+    'timestamps' to 'now ()'
+
+    @param code: code-tuple to merge the result in
+    @param tableName: name of the table
+    @param fieldDef: dictionary describing the field with the default
+    @param forAlter: TRUE if the definition is used in a table modification
+    """
+    if fieldDef ['defaultwith'] == 'serial':
+      seq = self._getSequenceName (tableName, fieldDef)
+      code [0].append (u"CREATE SEQUENCE %s%s" % (seq, self.END_COMMAND))
+      fieldDef ['default'] = "nextval ('%s')" % seq
+
+    elif fieldDef ['defaultwith'] == 'timestamp':
+      fieldDef ['default'] = "now()"
+
+
+
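For a hypothetical serial field 'foo_id' on table 'foo', the serial branch above
boils down to (the sequence name is invented; the real one comes from
_getSequenceName):

prologue = [u"CREATE SEQUENCE foo_foo_id_seq;"]
default  = "nextval ('foo_foo_id_seq')"   # later attached to the column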
+  # ---------------------------------------------------------------------------
+  # A key is an integer
+  # ---------------------------------------------------------------------------
+
+  def key (self, fieldDefinition):
+    """
+    Native datatype for a 'key'-field is 'int8'
+
+    @param fieldDefinition: dictionary describing the field
+    @return: string with the native datatype 'int8'
+    """
+    return "int8"
+
+
+  # ---------------------------------------------------------------------------
+  # Create an appropriate type for a number
+  # ---------------------------------------------------------------------------
+
+  def number (self, fieldDefinition):
+    """
+    This function returns an appropriate type for a number according to the
+    given length and precision.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: string with the native datatype
+    """
+    scale  = 0
+    length = 0
+
+    if fieldDefinition.has_key ('precision'):
+      scale = fieldDefinition ['precision']
+    if fieldDefinition.has_key ('length'):
+      length = fieldDefinition ['length']
+
+    if scale == 0:
+      if length <= 4:
+        return "smallint"
+      elif length <= 9:
+        return "integer"
+      elif length <= 18:
+        return "bigint"
+      else:
+        return "numeric (%s,0)" % length
+    else:
+      return "numeric (%s,%s)" % (length, scale)
+
+
+  # ---------------------------------------------------------------------------
+  # Native datatype for boolean is boolean
+  # ---------------------------------------------------------------------------
+
+  def boolean (self, fieldDefinition):
+    """
+    This function returns the native data type for a boolean, which is
+    'boolean'.
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'boolean'
+    """
+    return "boolean"
+
+
+  # ---------------------------------------------------------------------------
+  # Native datatype for datetime
+  # ---------------------------------------------------------------------------
+
+  def datetime (self, fieldDefinition):
+    """
+    This function returns the native type for a datetime value
+
+    @param fieldDefinition: dictionary describing the field
+    @return: 'timestamp without time zone'
+    """
+    return "timestamp without time zone"
+


Property changes on: 
trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Creation/Creation.py
___________________________________________________________________
Name: svn:keywords
   + Id


Property changes on: 
trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Creation/__init__.py
___________________________________________________________________
Name: svn:keywords
   + Id

Modified: 
trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Discovery/Introspection.py
===================================================================
--- 
trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Discovery/Introspection.py
      2004-06-18 22:11:34 UTC (rev 5919)
+++ 
trunk/gnue-common/src/datasources/drivers/postgresql/Schema/Discovery/Introspection.py
      2004-06-21 18:11:52 UTC (rev 5920)
@@ -97,8 +97,9 @@
     statement = "select relname, relkind, oid from pg_class " + \
             "where relname = '%s'" % (name)
 
-    cursor = self._connection.native.cursor()
-    cursor.execute(statement)
+    cursor = self._connection.makecursor (statement)
+    # cursor = self._connection.native.cursor()
+    # cursor.execute(statement)
 
     rs = cursor.fetchone()
     if rs:

Deleted: trunk/gnue-common/src/schema/scripter/Definition.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/Definition.py 2004-06-18 22:11:34 UTC 
(rev 5919)
+++ trunk/gnue-common/src/schema/scripter/Definition.py 2004-06-21 18:11:52 UTC 
(rev 5920)
@@ -1,282 +0,0 @@
-#
-# This file is part of GNU Enterprise.
-#
-# GNU Enterprise is free software; you can redistribute it
-# and/or modify it under the terms of the GNU General Public
-# License as published by the Free Software Foundation; either
-# version 2, or (at your option) any later version.
-#
-# GNU Enterprise is distributed in the hope that it will be
-# useful, but WITHOUT ANY WARRANTY; without even the implied
-# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-# PURPOSE. See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public
-# License along with program; see the file COPYING. If not,
-# write to the Free Software Foundation, Inc., 59 Temple Place
-# - Suite 330, Boston, MA 02111-1307, USA.
-#
-# Copyright 2001-2004 Free Software Foundation
-#
-# $Id$
-#
-
-# =============================================================================
-# The basic definition class
-# =============================================================================
-
-class Definition:
-  """
-  This class implements a basic definition. Each definition has a name, and a
-  list of sequences: a prologue, a header, a body, a footer and finally an
-  epilogue. A dump of a definition is done by concatenating all these sequences
-  line by line.
-  """
-
-  # ---------------------------------------------------------------------------
-  # Constructor
-  # ---------------------------------------------------------------------------
-
-  def __init__ (self, name = None):
-    self.name     = name
-
-    self.prologue = []
-    self.header   = []
-    self.body     = []
-    self.footer   = []
-    self.epilogue = []
-
-
-  # ---------------------------------------------------------------------------
-  # Return all sequences as one single list 
-  # ---------------------------------------------------------------------------
-
-  def merge (self):
-    """
-    This function merges all sequences into a single list
-    """
-    return self.prologue + self.header + self.body + self.footer + \
-           self.epilogue
-
-  # ---------------------------------------------------------------------------
-  # Dump a definition to a given destination
-  # ---------------------------------------------------------------------------
-
-  def writeDefinition (self, destination, encoding = "UTF-8"):
-    """
-    This method writes all sequences to the given destination using 'encoding',
-    which defaults to 'UTF-8'. The definition is written in the following
-    order: prologue, header, body, footer, epilogue
-    """
-    for line in self.merge ():
-      destination.write (line.encode (encoding) + "\n")
-
-
-
-# =============================================================================
-# Basic class for schema definitions
-# =============================================================================
-
-class SchemaDefinition (Definition):
-  """
-  This class introduces another sequence "fields" to the definition. This list
-  holds GSField/GSIndexField instances.
-  """
-  # ---------------------------------------------------------------------------
-  # Constructor
-  # ---------------------------------------------------------------------------
-
-  def __init__ (self, name = None):
-    Definition.__init__ (self, name)
-    self.fields = []
-
-
-
-# =============================================================================
-# IndexDefinition introduces unique-flag
-# =============================================================================
-
-class IndexDefinition (SchemaDefinition):
-  """
-  This class has another public property 'unique', which describes wether a
-  given index has the unique-flag set or not.
-  """
-  # ---------------------------------------------------------------------------
-  # Constructor
-  # ---------------------------------------------------------------------------
-
-  def __init__ (self, name = None, unique = False):
-    SchemaDefinition.__init__ (self, name)
-    self.unique = unique
-
-
-# =============================================================================
-# Constraint definition
-# =============================================================================
-
-class ConstraintDefinition (SchemaDefinition):
-  """
-  This class implements reference-constraints
-  """
-  # ---------------------------------------------------------------------------
-  # Constructor
-  # ---------------------------------------------------------------------------
-  def __init__ (self, name = None, kind = None):
-    SchemaDefinition.__init__ (self, name)
-
-    self.kind      = kind
-    self.reftable  = None
-    self.reffields = []
-
-
-# =============================================================================
-# PhaseDefinition
-# =============================================================================
-
-class PhaseDefinition (Definition):
-
-  def __init__ (self, name = None, phase = 0):
-    Definition.__init__ (self, name)
-    self.phase = phase
-
-
-# =============================================================================
-# TableDefinition 
-# =============================================================================
-class TableDefinition:
-  """
-  """
-
-  # ---------------------------------------------------------------------------
-  # Constructor
-  # ---------------------------------------------------------------------------
-  def __init__ (self, name = None, action = 'create'):
-    self.name        = name
-    self.action      = action
-    self.fields      = []
-    self.primaryKey  = None
-    self.indices     = {}
-    self.constraints = {}
-
-    self.phases      = {}
-
-
-  # ---------------------------------------------------------------------------
-  # Create a new IndexDefinition and add it to our index-sequence
-  # ---------------------------------------------------------------------------
-
-  def newIndex (self, name, unique = False):
-    """
-    This function creates a new IndexDefinition instance, adds it into the
-    table definitions' index-dictionary and returns it as a function result.
-    """
-    index = IndexDefinition (name, unique)
-    self.indices [index.name] = index
-    return index
-
-
-  # ---------------------------------------------------------------------------
-  # Create a new ConstraintDefinition
-  # ---------------------------------------------------------------------------
-
-  def newConstraint (self, name, kind):
-    """
-    This function creates a new ConstraintDefinition instance, adds it into the
-    table definitions' constraint-dictionary and returns it.
-    """
-    constraint = ConstraintDefinition (name, kind)
-    self.constraints [constraint.name] = constraint
-    return constraint
-
-
-  # ---------------------------------------------------------------------------
-  # Create a new primary key definition
-  # ---------------------------------------------------------------------------
-
-  def addPrimaryKey (self, name):
-    """
-    This function returns a new primary key definition.
-    """
-    self.primaryKey = IndexDefinition (name, True)
-    return self.primaryKey
-
-
-  # ---------------------------------------------------------------------------
-  # Returns a field by name from the fields collection
-  # ---------------------------------------------------------------------------
-  def getField (self, fieldName):
-    """
-    This function searches a field with the name 'fieldName' in it's fields
-    list. None is returned if no field with the given name was found.
-    """
-    for field in self.fields:
-      if field.name == fieldName:
-        return field
-
-    return None
-
-
-  # ---------------------------------------------------------------------------
-  # Add a given phasedefinition and return it
-  # ---------------------------------------------------------------------------
-
-  def getPhase (self, phase):
-    """
-    This function returns the PhaseDefinition instance of @phase; if it's not
-    defined it will be created
-    """
-    if not self.phases.has_key (phase):
-      self.phases [phase] = PhaseDefinition (self.name, phase)
-      
-    return self.phases [phase]
-
-
-# =============================================================================
-# Definition class for data rows
-# =============================================================================
-class DataDefinition (Definition):
-  """
-  This class encapsulates data to be inserted into the table 'name'. The
-  collection rows is a list of RowDefinition objects. These objects hold the
-  actual insertion data.
-  """
-
-  # ---------------------------------------------------------------------------
-  # Constructor
-  # ---------------------------------------------------------------------------
-  def __init__ (self, name = None):
-    Definition.__init__ (self, name)
-
-    self.rows     = []
-    self.tableDef = None
-
-  # ---------------------------------------------------------------------------
-  # Add a new row to the collection
-  # ---------------------------------------------------------------------------
-  """
-  This method extends the rows collection and returns a new RowDefinition
-  instance.
-  """
-  def addRow (self):
-    row = RowDefinition ()
-    self.rows.append (row)
-    return row
-
-
-# =============================================================================
-# Definition of a single data row
-# =============================================================================
-class RowDefinition:
-  """
-  This class encapsulates a single data row by providing three sequences: 
-  columns, values and types. Columns is a list with column-names, values a list
-  of corresponding values and types holds the columns datatypes.
-  """
-
-  # ---------------------------------------------------------------------------
-  # Constructor
-  # ---------------------------------------------------------------------------
-  def __init__ (self):
-    self.columns = []
-    self.values  = []
-    self.types   = []

Modified: trunk/gnue-common/src/schema/scripter/Scripter.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/Scripter.py   2004-06-18 22:11:34 UTC 
(rev 5919)
+++ trunk/gnue-common/src/schema/scripter/Scripter.py   2004-06-21 18:11:52 UTC 
(rev 5920)
@@ -19,384 +19,492 @@
 # Copyright 2002-2004 Free Software Foundation
 #
 # $Id$
-#
 
 from gnue.common import VERSION
 from gnue.common.schema import GSParser
-from gnue.common.utils.FileUtils import openResource, dyn_import
+from gnue.common.utils.FileUtils import openResource
 from gnue.common.apps.GClientApp import GClientApp
-from processors import vendors
-from gnue.common.schema.scripter.Definition import *
+from gnue.common.datasources import GDataSource, GConditions
+
 from time import strftime
 from string import join
 
 import sys
 import os
 import re
+import copy
+import types
 
+
 # =============================================================================
-# Generate SQL files from GNUe Schema Definition files
+# Exceptions
 # =============================================================================
 
+class Error (gException):
+  pass
+
+class MissingKeyError (Error):
+  def __init__ (self, tableName):
+    msg = u_("Data row of table '%s' has no key fields") % tableName
+    Error.__init__ (self, msg)
+
+# =============================================================================
+# Load GNUe Schema Definition files and create a database schema for it
+# =============================================================================
+
 class Scripter (GClientApp):
 
   VERSION         = VERSION
   COMMAND         = "gnue-schema"
   NAME            = "GNUe Schema Scripter"
-  USAGE           = "[options] file [old-schema]"
-  SUMMARY = _("GNUe Schema Scripter creates SQL files based on GNUe "
+  USAGE           = "[options] gsd-file [gsd-file gsd-file ...]"
+  SUMMARY = _("GNUe Schema Scripter creates database schemas based on GNUe "
               "Schema Definitions.")
 
-  _PROC_PATH = "gnue.common.schema.scripter.processors.%s"
 
-
   # ---------------------------------------------------------------------------
   # Constructor
   # ---------------------------------------------------------------------------
 
   def __init__ (self, connections = None):
-    self.addCommandOption('drop_tables',longOption='drop-tables',
-        help=_("Generate commands to drop relevant tables.  * NOT 
IMPLEMENTED"))
+    self.addCommandOption ('connection', 'c', argument='connectionname',
+        help = _("Use the connection <connectionname> for creating the 
schema"))
 
-    self.addCommandOption('ignore_schema','S','no-schema',
-        help=_("Do not generate schema creation code.  * NOT IMPLEMENTED"))
+    self.addCommandOption ('output','o', argument='filename',
+        help = _("Also send the code for creating the schema to this file."))
 
-    self.addCommandOption('ignore_data','D','no-data',
-        help=_("Do not generate data insertion code.  * NOT IMPLEMENTED"))
+    self.addCommandOption ('file-only', 'f', default = False,
+        help = _("If this flag is set, only code is sent to the output file "
+                 "and the schema is not created automatically."))
 
-    self.addCommandOption('encoding', 'e', default='UTF-8', 
argument=_('encoding'),
-        help= _("The generated SQL script will be encoded using <encoding>. "
-                "Default encoding is UTF-8") )
+    self.addCommandOption ('mode', 'm', argument='both|schema|data',
+        default = 'both',
+        help = _("Mode of operation. If mode is 'schema', only schema "
+                 "creation is done. If mode is 'data' only data integration "
+                 "is done."))
 
-    self.addCommandOption('upgrade_schema','u','upgrade-schema',
-        help= _("Generate code to upgrade an older version of a schema to "
-                "the recent version. You must specify a previous schema "
-                "on the command line.  * NOT IMPLEMENTED") )
-
-    self.addCommandOption('upgrade_data','U',
-        help= _("Generate code to upgrade an older version of schema data to "
-                "the recent version. You must specify a previous schema "
-                "on the command line.  * NOT IMPLEMENTED") )
-
-    self.addCommandOption('help-vendors',shortOption='l', 
action=self.__listVendors,
-        help=_("List all supported vendors.") )
-
-    self.addCommandOption('output','o', argument='dest',
-        help= _("The destination for the created schemas. This can be in 
several "
-                "formats. If <dest> is a file name, then output is written to "
-                "this file. If <dest> is a directory, then <dest>/<Vendor>.sql 
"
-                "is created. The default is to create <Vendor>.sql in the "
-                "current directory. NOTE: the first form (<dest> as a 
filename) "
-                "is not supported for --vendors all.") )
-
-    self.addCommandOption('vendor','v', default="all", argument='vendor',
-        help= _("The vendor to create a script for. If <vendor> is 'all', then 
"
-                "scripts for all supported vendors will be created. <vendor> "
-                "can also be a comma-separated list."))
-
     ConfigOptions = {}
     GClientApp.__init__ (self, connections, 'schema', ConfigOptions)
-    self.__vendors = []
 
 
+
   # ---------------------------------------------------------------------------
   # Main program
   # ---------------------------------------------------------------------------
+
   def run (self):
-    self.__check_options ()
+    """
+    This is the main function of the whole process. It verifies the given
+    options, loads all schema definitions and then logs into the connection to
+    perform all actions requested.
+    """
 
+    self.__checkOptions ()
+
     try:
-      print _("Loading gsd file '%s' ...") % self.ARGUMENTS [0]
+      self.tables    = []
+      self.tabledata = []
 
-      self.schema = GSParser.loadFile (self.__input)
+      for item in range (len (self._files)):
+        print _("Loading gsd file '%s' ...") % self.ARGUMENTS [item]
 
+        try:
+          schema = GSParser.loadFile (self._files [item])
+          schema.walk (self.__iterateObjects)
+
+        finally:
+          self._files [item].close ()
+
     except Exception:
       print sys.exc_info () [1]
 
     else:
-      for vendor in self.__vendors:
-        self.__runProcessor (vendor)
+      if self.__doData and len (self.tabledata):
+        self.verifyDataKeys ()
 
+      if self.__doSchema:
+        self.executeAndGenerateCode ()
 
+      if self.__doData and len (self.tabledata):
+        self.updateData ()
 
+
+
   # ---------------------------------------------------------------------------
   # Walk through all command line options
   # ---------------------------------------------------------------------------
-  def __check_options (self):
 
-    # we need at least one thing to do :)
-    if self.OPTIONS ["ignore_schema"] and self.OPTIONS ["ignore_data"]:
-      self.handleStartupError (_("--no-schema and --no-data cannot be used "
-                                 "together. What to export?"))
+  def __checkOptions (self):
+    """
+    This function checks whether the given command line arguments and options
+    are usable.
+    """
 
-    # check for unsupported options
-    if self.OPTIONS ["drop_tables"] or self.OPTIONS ["upgrade_schema"] or \
-       self.OPTIONS ["upgrade_data"]:
-      self.handleStartupError (_("--drop-tables, --upgrade-schema and "
-                                 "--upgrade-data\n are not implemented yet."))
-
-
-    # do we have an accessible input file
     if not len (self.ARGUMENTS):
       self.handleStartupError (_("No input file specified."))
 
     try:
-      self.__input = openResource (self.ARGUMENTS [0])
+      self._files = []
 
+      for filename in self.ARGUMENTS:
+        self._files.append (openResource (filename))
+
     except IOError:
-      self.handleStartupError (_("Unable to open input file %s.") % \
-                                 self.ARGUMENTS [0])
+      self.handleStartupError (_("Unable to open input file %s.") % filename)
 
-    # check the specified vendors
-    if self.OPTIONS ["vendor"].lower () == "all":
-      self.__vendors.extend (vendors)
-    else:
-      self.__vendors.extend (self.OPTIONS ["vendor"].split (","))
 
-    self.__output = self.OPTIONS ["output"]
-    if len (self.__vendors) > 1 and self.__output is not None:
-      if not os.path.isdir (self.__output):
-        self.handleStartupError ( \
-          _("If multiply vendors are specified --output must be a "
-            "directory or\n left empty."))
+    if not self.OPTIONS ['connection']:
+      self.handleStartupError (_("No connection specified."))
 
+    self.outfile = self.OPTIONS ['output']
 
-  # ---------------------------------------------------------------------------
-  # Print a list of all available processors
-  # ---------------------------------------------------------------------------
+    if self.OPTIONS ['file-only'] and self.outfile is None:
+      self.handleStartupError (_("Output to file only requested, but no "
+                                 "filename specified."))
 
-  def __listVendors (self):
-    self.printHelpHeader()
-    print "The following vendors can be passed as a parameter to the --vendor 
option."
-    print "To specify multiple vendors, separate with commas. "
-    print
-    print "Supported Database Vendors:"
+    self.__doSchema = self.OPTIONS ['mode'].lower () in ['both', 'schema']
+    self.__doData   = self.OPTIONS ['mode'].lower () in ['both', 'data'] and \
+                          not self.OPTIONS ['file-only']
 
-    modules  = {}
-    maxsize  = 0
+    if not (self.__doSchema or self.__doData):
+      self.handleStartupError (_("Mode of operation must be one of "
+                                 "'both', 'schema' or 'data'."))
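A standalone sketch of how the new --mode and --file-only options select the work
to be done (it mirrors the checks above; option parsing itself is left out), so
e.g. combining '-m data' with '--file-only' leaves nothing to do and is rejected:

def select_work (mode, file_only):
  # schema work for 'both'/'schema', data work for 'both'/'data',
  # but data is never touched when only file output is requested
  do_schema = mode.lower () in ['both', 'schema']
  do_data   = mode.lower () in ['both', 'data'] and not file_only
  return do_schema, do_data

assert select_work ('both',   False) == (True,  True)
assert select_work ('schema', False) == (True,  False)
assert select_work ('data',   True)  == (False, False)   # would be rejected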
 
-    for vendor in vendors:
-      maxsize = max(maxsize, len (vendor))
 
-      try:
-        modules [vendor] = dyn_import (self._PROC_PATH % vendor)
-
-      except ImportError:
-        pass
-
-    available = modules.keys ()
-    available.sort()
-
-    for vendor in available:
-      print "   " + vendor.ljust (maxsize + 4), modules [vendor].description
-
-    print
-    sys.exit()
-
   # ---------------------------------------------------------------------------
-  # Get the name of a given processor
+  # Get a dictionary with all keys listed in tags and values from sObject
   # ---------------------------------------------------------------------------
 
-  def __getProcessorName (self, processor):
-    return dyn_import (self._PROC_PATH % processor).name
+  def fetchTags (self, sObject, tags):
+    """
+    This function creates a dictionary with all attributes from sObject listed
+    in tags, where the attribute names are the keys.
 
+    @param sObject: Schema object to retrieve attributes from
+    @param tags: list of all attributes to retrieve
+    @return: dictionary with the attribute names as keys and their values
+    """
+    res = {}
+    for item in tags:
+      if hasattr (sObject, item):
+        res [item] = getattr (sObject, item)
+    return res
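A minimal standalone illustration of what fetchTags produces for a GSField-like
object (the object and its attribute values are invented):

def fetch_tags (obj, tags):
  # keep only the attributes that actually exist on the object
  return dict ((t, getattr (obj, t)) for t in tags if hasattr (obj, t))

class FakeField: pass

f = FakeField ()
f.name, f.type, f.length = 'address_code', 'string', 2

assert fetch_tags (f, ['name', 'type', 'default', 'length']) == \
       {'name': 'address_code', 'type': 'string', 'length': 2}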
 
-  # ---------------------------------------------------------------------------
-  # Run a given processor
-  # ---------------------------------------------------------------------------
 
-  def __runProcessor (self, vendor):
-    if not self.__output:
-      filename = "%s.sql" % self.__getProcessorName (vendor)
-
-    elif os.path.isdir (self.__output):
-      filename = os.path.join (self.__output,
-                               "%s.sql" % self.__getProcessorName (vendor))
-    else:
-      filename = self.__output
-
-    try:
-      self.destination = open (filename, "w")
-
-    except IOError:
-      sys.stderr.write (_("Unable to create output file %s.") % filename)
-      sys.exit (1)
-
-
-    # Instanciate the given processor and iterate over all schema objects
-    aModule = self._PROC_PATH % vendor
-    self.processor = dyn_import (aModule).Processor (self.destination, \
-                                                     self.ARGUMENTS [0])
-
-    print _("Writing schema to %s ...") % filename
-
-    try:
-      self.tables = {}
-      self.data   = []
-
-      self.processor.startDump ()
-      self.processor.client_encoding (self.OPTIONS ['encoding'])
-
-      self.schema.walk (self.__iterate_objects)
-
-      maxPhase = 0
-      for table in self.tables.values ():
-        maxPhase = max (maxPhase, max (table.phases.keys ()))
-
-      for phase in range (0, maxPhase + 1):
-        for table in self.tables.values ():
-          self.processor.writePhase (table, phase)
-
-      for table in self.data:
-        self.processor.writeData (table, table.tableDef)
-
-      self.processor.finishDump ()
-
-      # and finally close the output file
-      self.destination.close ()
-
-    except Exception, message:
-      os.unlink (filename)
-      print message
-
-
-
-
   # ---------------------------------------------------------------------------
   # iteration over all schema objects in the document tree
   # ---------------------------------------------------------------------------
 
-  def __iterate_objects (self, sObject):
-    if sObject._type == "GSSchema":
-      if not self.OPTIONS ["ignore_schema"]:
-        self.processor.startSchema ()
+  def __iterateObjects (self, sObject):
+    """
+    This is the master iteration function; it runs over all top-level objects
+    in the GSObject tree and processes GSTable- and GSTableData-subtrees.
 
-    elif sObject._type == "GSTable":
-      if not self.OPTIONS ["ignore_schema"]:
-        self.__schema_table (sObject)
+    @param sObject: current Schema object to be processed
+    """
+    if sObject._type == "GSTable":
+      self.tables.append ({'name': sObject.name})
+      sObject.walk (self.__schemaFields, defs = self.tables [-1])
 
-    elif sObject._type == "GSData":
-      if not self.OPTIONS ["ignore_data"]:
-        self.processor.startData ()
-
     elif sObject._type == "GSTableData":
-      if not self.OPTIONS ["ignore_data"]:
-        self.__data_table (sObject)
+      self.tabledata.append ({'name': sObject.tablename, 'rows': []})
+      sObject.walk (self.__dataRows, defs = self.tabledata [-1]['rows'])
 
     return
 
 
   # ---------------------------------------------------------------------------
-  # Process the schema definition of a GSTable object
+  # Process the fields of a GSTable
   # ---------------------------------------------------------------------------
-  def __schema_table (self, sObject):
-    aTable = TableDefinition (sObject.name, sObject.action)
-    self.tables [aTable.name] = aTable
-    sObject.walk (self.__schema_fields, tableDef = aTable)
 
-    self.processor.translateTableDefinition (aTable)
+  def __schemaFields (self, sObject, defs):
+    """
+    This function iterates over all child elements of a GSTable instance and
+    converts this subtree into a table definition dictionary given by the
+    parameter defs.
 
+    @param sObject: current schema object to be processed
+    @param defs: dictionary of the table definition, which has to be extended
+        by this function.
+    """
 
-  # ---------------------------------------------------------------------------
-  # Process the fields of a GSTable
-  # ---------------------------------------------------------------------------
-  def __schema_fields (self, sObject, tableDef):
-
     # process a regular field of a table
     if sObject._type == "GSField":
-      tableDef.fields.append (sObject)
+      fDef = self.fetchTags (sObject, ['name', 'type', 'default',
+                        'defaultwith', 'length', 'precision', 'nullable'])
+      if not defs.has_key ('fields'):
+        defs ['fields'] = []
+      defs ['fields'].append (fDef)
 
+    # add a primary key definition to the table definition
     elif sObject._type == "GSPrimaryKey":
-      pkdef = tableDef.addPrimaryKey (sObject.name)
-      sObject.walk (self.__schema_primarykey, tableDef = tableDef, pDef = 
pkdef)
+      pkDef = {'name': sObject.name, 'fields': []}
+      defs ['primarykey'] = pkDef
+      sObject.walk (self.__schemaPrimaryKey, defs = pkDef)
 
     # start an index definition and process it's fields
     elif sObject._type == "GSIndex":
-      uniq = hasattr (sObject, "unique") and sObject.unique
-      index = tableDef.newIndex (sObject.name, uniq)
+      if not defs.has_key ('indices'):
+        defs ['indices'] = []
 
-      # iterate over all index fields
-      sObject.walk (self.__schema_index, tableDef = tableDef, indexDef = index)
+      indexDef = {'name': sObject.name,
+                  'unique': hasattr (sObject, "unique") and sObject.unique,
+                  'fields': []}
+      defs ['indices'].append (indexDef)
 
+      sObject.walk (self.__schemaIndex, defs = indexDef)
+
     # create constraints
     elif sObject._type == "GSConstraint":
       # for unique-constraints we use a 'unique index'
       if sObject.type == "unique":
-        cDef = tableDef.newIndex (sObject.name, True)
+        if not defs.has_key ('indices'):
+          defs ['indices'] = []
 
+        cDef = {'name'  : sObject.name,
+                'unique': True,
+                'fields': []}
+        defs ['indices'].append (cDef)
+
       # for all other types of constraints we use a ConstraintDefinition
       else:
-        cDef = tableDef.newConstraint (sObject.name, sObject.type)
+        if not defs.has_key ('constraints'):
+          defs ['constraints'] = []
 
-      sObject.walk (self.__schema_constraint, constraint = cDef)
+        cDef = self.fetchTags (sObject, ['name', 'type'])
+        cDef ['fields'] = []
+        defs ['constraints'].append (cDef)
 
+      sObject.walk (self.__schemaConstraint, defs = cDef)
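
A minimal sketch of the table definition dictionary these walk callbacks build
up, for an invented table with two fields, a primary key and a unique index.
Only the tags fetched above are shown, and the 'name' entry is assumed to be
filled in by the caller before the walk:

  # Hypothetical result of walking a small GSTable -- all names and values
  # are made up for illustration.
  table_def = {
    'name'      : 'person',
    'fields'    : [{'name': 'id', 'type': 'number', 'length': 10,
                    'nullable': False},
                   {'name': 'surname', 'type': 'string', 'length': 40}],
    'primarykey': {'name': 'pk_person', 'fields': ['id']},
    'indices'   : [{'name': 'idx_surname', 'unique': True,
                    'fields': ['surname']}]
  }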
 
 
   # ---------------------------------------------------------------------------
   # Iterate over all fields of a primary key
   # ---------------------------------------------------------------------------
-  def __schema_primarykey (self, sObject, tableDef, pDef):
+
+  def __schemaPrimaryKey (self, sObject, defs):
+    """
+    This function converts all GSPKField instances into a primary key
+    definition (dictionary).
+
+    @param sObject: current schema object to be processed
+    @param defs: dictionary describing the primary key
+    """
     if sObject._type == "GSPKField":
-      pDef.fields.append (sObject)
+      defs ['fields'].append (sObject.name)
 
 
   # ---------------------------------------------------------------------------
   # Iterate over all fields of an index
   # ---------------------------------------------------------------------------
-  def __schema_index (self, sObject, tableDef, indexDef):
+
+  def __schemaIndex (self, sObject, defs):
+    """
+    This function converts all GSIndexField instances into an index definition
+    (dictionary).
+
+    @param sObject: current schema object to be processed
+    @param defs: dictionary describing the index
+    """
     if sObject._type == "GSIndexField":
-      indexDef.fields.append (sObject)
+      defs ['fields'].append (sObject.name)
 
 
   # ---------------------------------------------------------------------------
   # Iterate over all children of a constraint definition
   # ---------------------------------------------------------------------------
 
-  def __schema_constraint (self, sObject, constraint):
+  def __schemaConstraint (self, sObject, defs):
+    """
+    This function converts constraint fields and references into a constraint
+    definition (dictionary).
+
+    @param sObject: current schema object to be processed
+    @param defs: dictionary describing the constraint
+    """
     if sObject._type == "GSConstraintField":
-      constraint.fields.append (sObject)
+      defs ['fields'].append (sObject.name)
 
     elif sObject._type == "GSConstraintRef":
-      constraint.reftable = sObject.table
-      constraint.reffields.append (sObject)
+      defs ['reftable'] = sObject.table
+      if not defs.has_key ('reffields'):
+        defs ['reffields'] = []
+      defs ['reffields'].append (sObject.name)
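
For a foreign-key style constraint, the entry appended to defs ['constraints']
would, under the same assumptions, look roughly like the sketch below; the
constraint type, table and field names are invented:

  # Hypothetical constraint entry after walking a GSConstraint with one
  # GSConstraintField and one GSConstraintRef child.
  constraint_def = {
    'name'     : 'fk_person_country',
    'type'     : 'foreignkey',
    'fields'   : ['country'],
    'reftable' : 'country',
    'reffields': ['id']
  }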
 
 
   # ---------------------------------------------------------------------------
-  # Process a tabledata node
+  # Iterate over all rows of a tabledata definition
   # ---------------------------------------------------------------------------
-  def __data_table (self, sObject):
-    data = DataDefinition (sObject.tablename)
 
-    self.data.append (data)
-    if self.tables.has_key (data.name):
-      data.tableDef = self.tables [data.name]
+  def __dataRows (self, sObject, defs):
+    """
+    This function converts a GSRow instance into an element of the given
+    sequence of row definitions.
 
-    sObject.walk (self.__data_rows, dataDef = data)
+    @param sObject: current schema object to be processed
+    @param defs: sequence of row definitions of the corresponding table
+    """
+    if sObject._type == "GSRow":
+      defs.append ({'key': [], 'fields': []})
+      sObject.walk (self.__dataValues, defs [-1])
 
 
   # ---------------------------------------------------------------------------
-  # Iterate over all rows of a tabledata definition
+  # Iterate over all values of a row definition
   # ---------------------------------------------------------------------------
-  def __data_rows (self, sObject, dataDef):
-    if sObject._type == "GSRow":
-      row = dataDef.addRow ()
-      sObject.walk (self.__data_values, rowDef = row)
+  def __dataValues (self, sObject, defs):
+    """
+    This function translates a GSValue instance into an element of a row
+    definition dictionary.
 
+    @param sObject: current schema object to be processed.
+    @param defs: row definition dictionary to be extended.
+    """
+    if sObject._type == "GSValue":
+      defs ['fields'].append ({'name' : sObject.field, 'value': sObject.value})
+      if hasattr (sObject, "key") and sObject.key:
+        defs ['key'].append (sObject.field)
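
Taken together, __dataRows and __dataValues turn a GSTableData subtree into one
entry of self.tabledata; a minimal sketch of such an entry, with invented data:

  # Hypothetical self.tabledata entry: one GSRow with two GSValue children,
  # where the 'id' value carries the key attribute.
  table_data = {
    'name': 'person',
    'rows': [{'key'   : ['id'],
              'fields': [{'name': 'id',      'value': 1},
                         {'name': 'surname', 'value': 'Smith'}]}]
  }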
 
+
   # ---------------------------------------------------------------------------
-  # Iterate over all values of a row definition
+  # Make sure all rows in the table data sequence have a valid key
   # ---------------------------------------------------------------------------
-  def __data_values (self, sObject, rowDef):
-    if sObject._type == "GSValue":
-      if hasattr (sObject, "field"):
-        rowDef.columns.append (sObject.field)
 
-      if hasattr (sObject, "type"):
-        rowDef.types.append (sObject.type)
+  def verifyDataKeys (self):
+    """
+    This function iterates over all data rows and makes sure they all have an
+    appropriate key specified. Among all data rows of a table there must be at
+    least one row with the key attribute set on its fields. If no row carries
+    key information but a table definition with a primary key exists, that
+    primary key is used instead.
 
-      rowDef.values.append (sObject)
+    @raise MissingKeyError: raised if no key information could be found for
+        the rows of a table.
+    """
+    for item in self.tabledata:
+      tableName = item ['name']
+      rows      = item ['rows']
+      tableKey  = None
+      missing   = False
 
+      # is a key missing or does at least one row specify a key?
+      for row in rows:
+        if len (row ['key']):
+          tableKey = row ['key']
+        else:
+          missing = True
 
+      if not missing:
+        continue
+
+      # is there a table definition with a valid primary key available?
+      if tableKey is None:
+        for table in self.tables:
+          if table ['name'] == tableName and table.has_key ('primarykey'):
+            tableKey = table ['primarykey']['fields']
+            break
+
+      # if at least one key was available, fill up all missing keys with this
+      # one.
+      if tableKey is not None:
+        for row in rows:
+          if not len (row ['key']):
+            row ['key'] = tableKey
+        continue
+
+      raise MissingKeyError, (tableName)
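
The fallback can be illustrated in isolation: a row without key fields inherits
the key of a sibling row, and only if no row carries a key at all is the
primary key of the matching table definition consulted. A simplified,
standalone sketch with invented data:

  # Reduced version of the per-table key fallback performed above.
  rows = [{'key': ['id'], 'fields': []},   # this row defines the key
          {'key': [],     'fields': []}]   # this one will inherit it

  tableKey = None
  for row in rows:
    if len (row ['key']):
      tableKey = row ['key']

  if tableKey is not None:
    for row in rows:
      if not len (row ['key']):
        row ['key'] = tableKey

  print (rows [1]['key'])                  # prints: ['id']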
+
+
+  # ---------------------------------------------------------------------------
+  # Execute and generate the code
+  # ---------------------------------------------------------------------------
+
+  def executeAndGenerateCode (self):
+    """
+    This function logs into the given connection and asks it to update its
+    schema according to the loaded table definitions. The schema creation code
+    generated by this call is additionally written to the given output file
+    (if requested by the options).
+    """
+
+    connection = self.connections.getConnection (self.OPTIONS ['connection'],
+                                                 login = True)
+
+    print _("Updating schema ...")
+    code = connection.updateSchema (self.tables, self.OPTIONS ['file-only'])
+
+    if self.outfile is not None:
+      dest = open (self.outfile, 'w')
+
+      for item in code:
+        for line in item:
+          dest.write (line + "\n")
+
+      dest.close ()
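
The writer loop above assumes that updateSchema () returns a sequence of code
blocks, each itself a sequence of lines. Under that assumption the file-writing
step can be exercised on its own; the file name and the SQL are invented:

  # 'code' stands in for the assumed return value of connection.updateSchema ().
  code = [["CREATE TABLE person (", "  id integer,", "  surname varchar (40))"],
          ["CREATE UNIQUE INDEX idx_surname ON person (surname)"]]

  dest = open ("person.sql", 'w')          # hypothetical output file
  for item in code:
    for line in item:
      dest.write (line + "\n")
  dest.close ()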
+
+
+  # ---------------------------------------------------------------------------
+  # Update the backend with table data
+  # ---------------------------------------------------------------------------
+
+  def updateData (self):
+    """
+    This function updates the backend with data. Every row is looked up using
+    the specified key fields and updated if it already exists, or inserted
+    otherwise.
+    """
+
+    print _("Updating data ...")
+      
+    for table in self.tabledata:
+      tablename = table ['name']
+      rows      = table ['rows']
+
+      attributes = {'name'    : "dts_%s" % tablename,
+                    'database': self.OPTIONS ['connection'],
+                    'table'   : tablename}
+
+      fieldList = [field ['name'] for field in rows [0]['fields']]
+
+      datasource = GDataSource.DataSourceWrapper (
+          connections = self.connections,
+          attributes  = attributes,
+          fields      = fieldList,
+          unicodeMode = True)
+
+      print _("  updating table '%s' ...") % tablename
+      updCount = 0
+      insCount = 0
+
+      for row in rows:
+        cList = ['and']
+
+        for keyField in row ['key']:
+          for field in row ['fields']:
+            if field ['name'] == keyField:
+              kvalue = field ['value']
+              break
+
+          cList.append (['eq', ['field', keyField], ['const', kvalue]])
+
+        condition = GConditions.buildTreeFromList (cList)
+        resultSet = datasource.createResultSet (condition)
+
+        if resultSet.firstRecord () is None:
+          resultSet.insertRecord ()
+          insCount += 1
+        else:
+          updCount += 1
+
+        for field in row ['fields']:
+          resultSet.current.setField (field ['name'], field ['value'])
+
+        resultSet.post ()
+
+      self.connections.commitAll ()
+      print _("    Inserted %d row(s), updated %d row(s)") % \
+          (insCount, updCount)
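
The lookup condition is built as a prefix-style list before being handed to
GConditions.buildTreeFromList; for a row keyed on two fields the list would
take the shape sketched below (field names and values are invented):

  # Hypothetical condition list for a row whose key consists of the fields
  # 'id' and 'lang'; the shape follows the cList construction above.
  cList = ['and',
           ['eq', ['field', 'id'],   ['const', 1]],
           ['eq', ['field', 'lang'], ['const', 'en']]]
  # condition = GConditions.buildTreeFromList (cList)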
+
+
+
 # =============================================================================
 # If executed directly, start the scripter
 # =============================================================================




