toMap();
+
/**
* load current object from DB
*/
public boolean loadFromDB() throws ExceptionDBGit;
-
+
public String getHash();
-
+
/**
* Save meta file to base path
* Example - .dbgit/basePath/path_and_filename_meta_object
- *
+ *
* @param basePath
* @throws IOException
*/
default boolean saveToFile(String basePath) throws ExceptionDBGit {
File file = new File(DBGitPath.getFullPath(basePath)+"/"+getFileName());
DBGitPath.createDir(file.getParent());
-
+
try {
FileOutputStream out = new FileOutputStream(file.getAbsolutePath());
boolean res = this.serialize(out);
out.close();
-
+
return res;
} catch (Exception e) {
- e.printStackTrace();
throw new ExceptionDBGit(e);
}
}
-
+
default boolean saveToFile() throws ExceptionDBGit {
return saveToFile(null);
}
-
+
/**
* Load meta file to base path
* Example - .dbgit/basePath/path_and_filename_meta_object
- *
+ *
* @param basePath
* @throws IOException
*/
@@ -124,7 +135,11 @@ default IMetaObject loadFromFile(String basePath) throws Exception {
String filename = DBGitPath.getFullPath(basePath);
File file = new File(filename+"/"+getFileName());
FileInputStream fis = new FileInputStream(file);
- IMetaObject meta = this.deSerialize(fis);
+ IMetaObject meta;
+ if (!file.getPath().endsWith(".csv"))
+ meta = this.deSerialize(fis);
+ else
+ meta = this.deSerialize(file);
fis.close();
if (meta != null && meta.getName().isEmpty()) {
meta.setName(this.getName());
@@ -145,10 +160,16 @@ default IMetaObject loadFromFile() throws Exception {
default DBSchemaObject getUnderlyingDbObject(){
//All in one place
if(this instanceof MetaSql) return ((MetaSql) this).getSqlObject();
+ if(this instanceof MetaSequence) return ((MetaSequence) this).getSequence();
if(this instanceof MetaTable) return ((MetaTable) this).getTable();
return null;
}
+ default boolean dependsOn(IMetaObject obj){
+ if (this.getUnderlyingDbObject() == null || this.getUnderlyingDbObject().getDependencies() == null) return false;
+ return this.getUnderlyingDbObject().getDependencies().contains(obj.getName());
+ }
+
static IMetaObject create(String name) throws ExceptionDBGit {
NameMeta nm = new NameMeta(name);
if (nm.getType() == null) throw new ExceptionDBGit(DBGitLang.getInstance().getValue("errors", "meta", "parseError").withParams(name));
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaBase.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaBase.java
index 03d33e3..30d10cb 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/MetaBase.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaBase.java
@@ -1,137 +1,164 @@
-package ru.fusionsoft.dbgit.meta;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.charset.Charset;
-
-import org.yaml.snakeyaml.DumperOptions;
-import org.yaml.snakeyaml.Yaml;
-import org.yaml.snakeyaml.DumperOptions.ScalarStyle;
-import org.yaml.snakeyaml.nodes.Tag;
-
-import ru.fusionsoft.dbgit.adapters.AdapterFactory;
-import ru.fusionsoft.dbgit.core.DBGit;
-import ru.fusionsoft.dbgit.core.DBGitPath;
-import ru.fusionsoft.dbgit.core.ExceptionDBGit;
-import ru.fusionsoft.dbgit.core.ExceptionDBGitRunTime;
-import ru.fusionsoft.dbgit.core.db.DbType;
-import ru.fusionsoft.dbgit.utils.ConsoleWriter;
-import ru.fusionsoft.dbgit.yaml.DBGitYamlConstructor;
-import ru.fusionsoft.dbgit.yaml.DBGitYamlRepresenter;
-import ru.fusionsoft.dbgit.yaml.YamlOrder;
-
-
-/**
- * Base class for all meta objects
- * @author mikle
- *
- */
-public abstract class MetaBase implements IMetaObject {
- @YamlOrder(0)
- protected String name;
-
- @YamlOrder(1)
- protected DbType dbType;
-
- @YamlOrder(1)
- protected String dbVersion;
-
- @Override
- public String getName() {
- return name;
- }
-
- @Override
- public void setDbType(DbType dbType) {
- this.dbType = dbType;
- }
-
- @Override
- public DbType getDbType() {
- return dbType;
- }
-
- @Override
- public void setDbVersion(String dbVersion) {
- this.dbVersion = dbVersion;
- }
-
- @Override
- public String getDbVersion() {
- return dbVersion;
- }
-
- @Override
- public void setName(String name) throws ExceptionDBGit {
- this.name = name;
- }
-
- @Override
- public String getFileName() {
- return getName();
- }
-
- /**
- * When you save the yaml object, the library ignores properties for which there is no getter and setter
- * При сохранении объекта yaml библиотека игнорирует свойства для которых нет геттера и сеттера
- * @param stream
- * @throws IOException
- */
- public boolean yamlSerialize(OutputStream stream) throws IOException {
- Yaml yaml = createYaml();
- String output = yaml.dumpAs(this, Tag.MAP, DumperOptions.FlowStyle.BLOCK);
-
- stream.write(output.getBytes(Charset.forName("UTF-8")));
- return true;
- }
-
- public IMetaObject yamlDeSerialize(InputStream stream) {
- Yaml yaml = createYaml();
-
- IMetaObject meta = yaml.loadAs(stream, this.getClass());
- return meta;
- }
-
- public Yaml createYaml() {
- DumperOptions options = new DumperOptions();
- options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
- options.setPrettyFlow(true);
- Yaml yaml = new Yaml(new DBGitYamlConstructor(), new DBGitYamlRepresenter(), options);
- return yaml;
- }
-
- @Override
- public int addToGit() throws ExceptionDBGit {
- DBGit dbGit = DBGit.getInstance();
- dbGit.addFileToIndexGit(DBGitPath.DB_GIT_PATH+"/"+getFileName());
- return 1;
- }
-
- @Override
- public int removeFromGit() throws ExceptionDBGit {
- DBGit dbGit = DBGit.getInstance();
- dbGit.removeFileFromIndexGit(DBGitPath.DB_GIT_PATH+"/"+getFileName());
- return 1;
- }
-
- public void setDbType() {
- try {
- setDbType(AdapterFactory.createAdapter().getDbType());
- } catch (ExceptionDBGit e) {
- throw new ExceptionDBGitRunTime(e.getLocalizedMessage());
- }
-
- }
-
- public void setDbVersion() {
- try {
- setDbVersion(AdapterFactory.createAdapter().getDbVersion());
- } catch (ExceptionDBGit e) {
- throw new ExceptionDBGitRunTime(e.getLocalizedMessage());
- }
-
- }
-
-
-}
+package ru.fusionsoft.dbgit.meta;
+
+import java.io.*;
+import java.nio.charset.Charset;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.yaml.snakeyaml.DumperOptions;
+import org.yaml.snakeyaml.Yaml;
+import org.yaml.snakeyaml.nodes.Tag;
+
+import ru.fusionsoft.dbgit.adapters.AdapterFactory;
+import ru.fusionsoft.dbgit.core.DBGit;
+import ru.fusionsoft.dbgit.core.DBGitPath;
+import ru.fusionsoft.dbgit.core.ExceptionDBGit;
+import ru.fusionsoft.dbgit.core.ExceptionDBGitRunTime;
+import ru.fusionsoft.dbgit.core.db.DbType;
+import ru.fusionsoft.dbgit.utils.StringProperties;
+import ru.fusionsoft.dbgit.yaml.DBGitYamlConstructor;
+import ru.fusionsoft.dbgit.yaml.DBGitYamlRepresenter;
+import ru.fusionsoft.dbgit.yaml.YamlOrder;
+
+
+/**
+ * Base class for all meta objects
+ * @author mikle
+ *
+ */
+public abstract class MetaBase implements IMetaObject {
+ @YamlOrder(0)
+ protected String name;
+
+ @YamlOrder(1)
+ protected DbType dbType;
+
+ @YamlOrder(2)
+ protected String dbVersion;
+
+ @Override
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public void setDbType(DbType dbType) {
+ this.dbType = dbType;
+ }
+
+ @Override
+ public DbType getDbType() {
+ return dbType;
+ }
+
+ @Override
+ public void setDbVersion(String dbVersion) {
+ this.dbVersion = dbVersion;
+ }
+
+ @Override
+ public String getDbVersion() {
+ return dbVersion;
+ }
+
+ @Override
+ public Double getDbVersionNumber() {
+ Matcher matcher = Pattern.compile("\\D*(\\d+)\\.(\\d+)").matcher(getDbVersion());
+ matcher.find();
+        Double result = Double.valueOf(matcher.group(1) + "." + matcher.group(2));
+ return result;
+ }
+
+ @Override
+ public void setName(String name) throws ExceptionDBGit {
+ this.name = name;
+ }
+
+ @Override
+ public String getFileName() {
+ return getName();
+ }
+
+ /**
+ * When you save the yaml object, the library ignores properties for which there is no getter and setter
+ * При сохранении объекта yaml библиотека игнорирует свойства для которых нет геттера и сеттера
+ * @param stream
+ * @throws IOException
+ */
+ public boolean yamlSerialize(OutputStream stream) throws IOException {
+ Yaml yaml = createYaml();
+ String output = yaml.dumpAs(this, Tag.MAP, DumperOptions.FlowStyle.BLOCK);
+
+ stream.write(output.getBytes(Charset.forName("UTF-8")));
+ return true;
+ }
+
+ public IMetaObject yamlDeSerialize(InputStream stream) {
+ Yaml yaml = createYaml();
+ //Map some = yaml.loadAs(stream, Map.class);
+ IMetaObject meta = yaml.loadAs(stream, this.getClass());
+ return meta;
+ }
+
+ public Map toMap() {
+ Yaml yaml = createYaml();
+ String output = yaml.dumpAs(this, Tag.MAP, DumperOptions.FlowStyle.BLOCK);
+ Map meta = yaml.loadAs(output, Map.class);
+ return meta;
+ }
+
+ public Yaml createYaml() {
+ DumperOptions options = new DumperOptions();
+ options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
+ options.setPrettyFlow(true);
+
+ Yaml yaml = new Yaml(new DBGitYamlConstructor(), new DBGitYamlRepresenter(), options);
+ return yaml;
+ }
+
+ @Override
+ public int addToGit() throws ExceptionDBGit {
+ DBGit dbGit = DBGit.getInstance();
+ dbGit.addFileToIndexGit(DBGitPath.DB_GIT_PATH+"/"+getFileName());
+ return 1;
+ }
+
+ @Override
+ public int removeFromGit() throws ExceptionDBGit {
+ DBGit dbGit = DBGit.getInstance();
+ dbGit.removeFileFromIndexGit(DBGitPath.DB_GIT_PATH+"/"+getFileName());
+ return 1;
+ }
+
+ public void setDbType() {
+ try {
+ setDbType(AdapterFactory.createAdapter().getDbType());
+ } catch (ExceptionDBGit e) {
+ throw new ExceptionDBGitRunTime(e.getLocalizedMessage());
+ }
+
+ }
+
+ public void setDbVersion() {
+ try {
+ setDbVersion(AdapterFactory.createAdapter().getDbVersion());
+ } catch (ExceptionDBGit e) {
+ throw new ExceptionDBGitRunTime(e.getLocalizedMessage());
+ }
+
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (!(o instanceof MetaBase)) return false;
+ MetaBase metaBase = (MetaBase) o;
+ return getHash().equals(metaBase.getHash());
+ }
+
+ @Override
+ public int hashCode() {
+ return getHash().hashCode();
+ }
+}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaDomain.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaDomain.java
new file mode 100644
index 0000000..a7aa969
--- /dev/null
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaDomain.java
@@ -0,0 +1,40 @@
+package ru.fusionsoft.dbgit.meta;
+
+import ru.fusionsoft.dbgit.adapters.AdapterFactory;
+import ru.fusionsoft.dbgit.adapters.IDBAdapter;
+import ru.fusionsoft.dbgit.core.ExceptionDBGit;
+import ru.fusionsoft.dbgit.dbobjects.DBDomain;
+import ru.fusionsoft.dbgit.dbobjects.DBSQLObject;
+
+public class MetaDomain extends MetaSql {
+ public MetaDomain() {
+ }
+
+ public MetaDomain(DBSQLObject sqlObject) throws ExceptionDBGit {
+ super(sqlObject);
+ }
+
+ /**
+ * @return Type meta object
+ */
+ @Override
+ public final IDBGitMetaType getType() {
+ return DBGitMetaType.DBGitDomain;
+ }
+
+ /**
+ * load current object from DB
+ */
+ @Override
+ public final boolean loadFromDB() throws ExceptionDBGit {
+ final IDBAdapter adapter = AdapterFactory.createAdapter();
+ final NameMeta nm = MetaObjectFactory.parseMetaName(name);
+ final DBDomain dbObject = adapter.getDomain(nm.getSchema(), nm.getName());
+
+ if (dbObject != null) {
+ setSqlObject(dbObject);
+ return true;
+ } else
+ return false;
+ }
+}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaEnum.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaEnum.java
new file mode 100644
index 0000000..fd41f51
--- /dev/null
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaEnum.java
@@ -0,0 +1,32 @@
+package ru.fusionsoft.dbgit.meta;
+
+import ru.fusionsoft.dbgit.adapters.AdapterFactory;
+import ru.fusionsoft.dbgit.adapters.IDBAdapter;
+import ru.fusionsoft.dbgit.core.ExceptionDBGit;
+import ru.fusionsoft.dbgit.dbobjects.DBEnum;
+
+public class MetaEnum extends MetaSql {
+ /**
+ * @return Type meta object
+ */
+ @Override
+ public IDBGitMetaType getType() {
+ return DBGitMetaType.DBGitEnum;
+ }
+
+ /**
+ * load current object from DB
+ */
+ @Override
+ public boolean loadFromDB() throws ExceptionDBGit {
+ final IDBAdapter adapter = AdapterFactory.createAdapter();
+ final NameMeta nm = MetaObjectFactory.parseMetaName(name);
+ final DBEnum dbObject = adapter.getEnum(nm.getSchema(), nm.getName());
+
+ if (dbObject != null) {
+ setSqlObject(dbObject);
+ return true;
+ } else
+ return false;
+ }
+}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaFunction.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaFunction.java
index 9823362..09d6a0e 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/MetaFunction.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaFunction.java
@@ -11,7 +11,7 @@ public class MetaFunction extends MetaSql {
public MetaFunction() {
super();
}
-
+
public MetaFunction(DBFunction fun) throws ExceptionDBGit {
super(fun);
}
@@ -21,14 +21,12 @@ public DBGitMetaType getType() {
return DBGitMetaType.DbGitFunction;
}
- @Override
- public String getName() {
- return name;
- }
-
@Override
public String getFileName(){
String res = name.replace(".fnc", "");
+ String schemaName = "";
+ if (res.contains("/"))
+ schemaName = res.substring(0, res.indexOf("/"));
if (this.getSqlObject() != null && this.getSqlObject().getOptions() != null && this.getSqlObject().getOptions().get("arguments") != null)
res = res + "_" + this.getSqlObject().getOptions().get("arguments").getData()
@@ -41,7 +39,7 @@ public String getFileName(){
.replace("::", "");
if (res.endsWith("_")) res = res.substring(0, res.length() - 1);
- if (res.length() > MAX_FILE_NAME_LENGTH) {
+ if (res.length() > (schemaName.length() + 1 + MAX_FILE_NAME_LENGTH)) {
String resTemp = res.substring(0, MAX_FILE_NAME_LENGTH);
int resInt = res.length() - MAX_FILE_NAME_LENGTH;
res = resTemp + "_" + resInt;
@@ -51,7 +49,7 @@ public String getFileName(){
return res;
}
-
+
@Override
public boolean loadFromDB() throws ExceptionDBGit {
IDBAdapter adapter = AdapterFactory.createAdapter();
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaObjOptions.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaObjOptions.java
index 8063e44..d4f5d8b 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/MetaObjOptions.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaObjOptions.java
@@ -4,15 +4,14 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
+import java.util.Objects;
-import ru.fusionsoft.dbgit.adapters.AdapterFactory;
-import ru.fusionsoft.dbgit.adapters.IDBAdapter;
import ru.fusionsoft.dbgit.core.DBGitLang;
import ru.fusionsoft.dbgit.core.ExceptionDBGit;
import ru.fusionsoft.dbgit.core.ExceptionDBGitObjectNotFound;
import ru.fusionsoft.dbgit.dbobjects.DBOptionsObject;
-import ru.fusionsoft.dbgit.dbobjects.DBUser;
import ru.fusionsoft.dbgit.utils.CalcHash;
+import ru.fusionsoft.dbgit.yaml.YamlOrder;
/**
* Base Meta class for data use DBOptionsObject information. This data is tree string properties.
@@ -21,6 +20,7 @@
*/
public abstract class MetaObjOptions extends MetaBase {
+ @YamlOrder(4)
private DBOptionsObject objectOption = null;
public MetaObjOptions() {
@@ -48,7 +48,7 @@ public boolean serialize(OutputStream stream) throws IOException {
}
@Override
- public IMetaObject deSerialize(InputStream stream) throws IOException{
+ public IMetaObject deSerialize(InputStream stream) {
return yamlDeSerialize(stream);
}
@@ -75,4 +75,16 @@ public void setObjectOptionFromMap(Map map) t
setObjectOption(map.get(nm.getName()));
}
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (!(o instanceof MetaObjOptions)) return false;
+ MetaObjOptions that = (MetaObjOptions) o;
+ return getObjectOption().getHash().equals(that.getObjectOption().getHash());
+ }
+
+ @Override
+ public int hashCode() {
+        return Objects.hash(getObjectOption() == null ? null : getObjectOption().getHash());
+ }
}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaObjectFactory.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaObjectFactory.java
index ca303bd..0beb554 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/MetaObjectFactory.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaObjectFactory.java
@@ -46,21 +46,18 @@ public static IMetaObject createMetaObject(IDBGitMetaType tp) throws ExceptionDB
}
- public static NameMeta parseMetaName(String name) throws ExceptionDBGit {
- try {
- NameMeta nm = new NameMeta();
-
- Integer pos = name.lastIndexOf("/");
- if (pos > 0) {
- nm.setSchema(name.substring(0, pos));
- }
- Integer posDot = name.lastIndexOf(".");
- nm.setName(name.substring(pos+1, posDot));
- nm.setType(DBGitMetaType.valueByCode(name.substring(posDot + 1)));
+ public static NameMeta parseMetaName(String name) {
+ NameMeta nm = new NameMeta();
- return nm;
- } catch(Exception e) {
- throw new ExceptionDBGitRunTime(DBGitLang.getInstance().getValue("errors", "meta", "parseError").withParams(name), e);
+ Integer pos = name.lastIndexOf("/");
+ if (pos > 0) {
+ nm.setSchema(name.substring(0, pos));
}
+ Integer posDot = name.lastIndexOf(".");
+ nm.setName(name.substring(pos+1, posDot));
+ nm.setType(DBGitMetaType.valueByCode(name.substring(posDot + 1)));
+
+ return nm;
+
}
}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaProcedure.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaProcedure.java
index 7335917..46a8a61 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/MetaProcedure.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaProcedure.java
@@ -29,6 +29,9 @@ public String getName() {
@Override
public String getFileName(){
String res = name.replace(".prc", "");
+ String schemaName = "";
+ if (res.contains("/"))
+ schemaName = res.substring(0, res.indexOf("/"));
if (this.getSqlObject() != null && this.getSqlObject().getOptions() != null && this.getSqlObject().getOptions().get("arguments") != null)
res = res + "_" + this.getSqlObject().getOptions().get("arguments").getData()
@@ -41,7 +44,7 @@ public String getFileName(){
.replace("::", "");
if (res.endsWith("_")) res = res.substring(0, res.length() - 1);
- if (res.length() > MAX_FILE_NAME_LENGTH) {
+ if (res.length() > (schemaName.length() + 1 + MAX_FILE_NAME_LENGTH)) {
String resTemp = res.substring(0, MAX_FILE_NAME_LENGTH);
int resInt = res.length() - MAX_FILE_NAME_LENGTH;
res = resTemp + "_" + resInt;
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaSql.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaSql.java
index 78667f9..7319170 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/MetaSql.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaSql.java
@@ -1,18 +1,10 @@
package ru.fusionsoft.dbgit.meta;
+import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.io.StringWriter;
-import java.nio.charset.Charset;
-import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import org.apache.commons.io.IOUtils;
-
-import ru.fusionsoft.dbgit.core.DBGitLang;
import ru.fusionsoft.dbgit.core.ExceptionDBGit;
-import ru.fusionsoft.dbgit.core.ExceptionDBGitObjectNotFound;
-import ru.fusionsoft.dbgit.dbobjects.DBOptionsObject;
import ru.fusionsoft.dbgit.dbobjects.DBSQLObject;
/**
@@ -21,19 +13,18 @@
*
*/
public abstract class MetaSql extends MetaBase {
-
-
+
protected DBSQLObject sqlObject;
public MetaSql() {
setDbType();
setDbVersion();
}
-
+
public MetaSql(DBSQLObject sqlObject) throws ExceptionDBGit {
this();
setSqlObject(sqlObject);
- }
-
+ }
+
public DBSQLObject getSqlObject() {
return sqlObject;
}
@@ -45,7 +36,7 @@ public void setSqlObject(DBSQLObject sqlObject) throws ExceptionDBGit {
}
@Override
- public boolean serialize(OutputStream stream) throws Exception {
+ public boolean serialize(OutputStream stream) throws IOException {
/*
String owner = "owner: "+getSqlObject().getOwner()+"\n";
stream.write(owner.getBytes(Charset.forName("UTF-8")));
@@ -59,7 +50,7 @@ public boolean serialize(OutputStream stream) throws Exception {
}
@Override
- public IMetaObject deSerialize(InputStream stream) throws Exception {
+ public IMetaObject deSerialize(InputStream stream) {
NameMeta nm = MetaObjectFactory.parseMetaName(getName());
/*
sqlObject = new DBSQLObject();
@@ -83,12 +74,13 @@ public IMetaObject deSerialize(InputStream stream) throws Exception {
public String getHash() {
return sqlObject != null ? sqlObject.getHash() : EMPTY_HASH;
}
- public void setObjectOptionFromMap(Map map) throws ExceptionDBGit {
- NameMeta nm = MetaObjectFactory.parseMetaName(getName());
- if (!map.containsKey(nm.getName())) {
- throw new ExceptionDBGitObjectNotFound(DBGitLang.getInstance().getValue("errors", "meta", "notFound").withParams(getName()));
- }
- setSqlObject(map.get(nm.getName()));
- }
+
+// public void setSqlObjectFromMap(Map map) throws ExceptionDBGit {
+// NameMeta nm = MetaObjectFactory.parseMetaName(getName());
+// if (!map.containsKey(nm.getName())) {
+// throw new ExceptionDBGitObjectNotFound(DBGitLang.getInstance().getValue("errors", "meta", "notFound").withParams(getName()));
+// }
+// setSqlObject(map.get(nm.getName()));
+// }
}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaTable.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaTable.java
index 96bbcb1..927a338 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/MetaTable.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaTable.java
@@ -11,55 +11,49 @@
import ru.fusionsoft.dbgit.adapters.IDBAdapter;
import ru.fusionsoft.dbgit.core.DBGitIndex;
import ru.fusionsoft.dbgit.core.ExceptionDBGit;
-import ru.fusionsoft.dbgit.core.ExceptionDBGitRunTime;
-import ru.fusionsoft.dbgit.core.ItemIndex;
-import ru.fusionsoft.dbgit.dbobjects.DBConstraint;
-import ru.fusionsoft.dbgit.dbobjects.DBIndex;
-import ru.fusionsoft.dbgit.dbobjects.DBSchema;
-import ru.fusionsoft.dbgit.dbobjects.DBTable;
-import ru.fusionsoft.dbgit.dbobjects.DBTableField;
+import ru.fusionsoft.dbgit.core.ExceptionDBGitObjectNotFound;
+import ru.fusionsoft.dbgit.dbobjects.*;
import ru.fusionsoft.dbgit.utils.CalcHash;
-import ru.fusionsoft.dbgit.utils.ConsoleWriter;
import ru.fusionsoft.dbgit.yaml.YamlOrder;
/**
- * Meta class for db Table
+ * Meta class for db Table
* @author mikle
*
*/
-public class MetaTable extends MetaBase {
+public class MetaTable extends MetaBase {
- @YamlOrder(1)
+ @YamlOrder(3)
private DBTable table;
-
- @YamlOrder(2)
+
+ @YamlOrder(4)
//private IMapFields fields = new TreeMapFields();
private Map fields = new TreeMap<>();
-
- @YamlOrder(3)
+
+ @YamlOrder(5)
private Map indexes = new TreeMap<>();
-
- @YamlOrder(4)
+
+ @YamlOrder(6)
private Map constraints = new TreeMap<>();
- public MetaTable() {
+ public MetaTable() {
setDbType();
setDbVersion();
}
-
+
public MetaTable(String namePath) {
setDbType();
setDbVersion();
this.name = namePath;
}
-
+
public MetaTable(DBTable tbl) {
setDbType();
setDbVersion();
setTable(tbl);
}
-
+
@Override
public DBGitMetaType getType() {
return DBGitMetaType.DBGitTable;
@@ -71,7 +65,7 @@ public boolean serialize(OutputStream stream) throws IOException {
}
@Override
- public IMetaObject deSerialize(InputStream stream) throws IOException {
+ public IMetaObject deSerialize(InputStream stream) {
return yamlDeSerialize(stream);
}
@@ -79,12 +73,15 @@ public IMetaObject deSerialize(InputStream stream) throws IOException {
public boolean loadFromDB() throws ExceptionDBGit {
IDBAdapter adapter = AdapterFactory.createAdapter();
NameMeta nm = MetaObjectFactory.parseMetaName(getName());
-
- DBTable tbl = adapter.getTable(nm.getSchema(), nm.getName());
- if (tbl != null)
- return loadFromDB(tbl);
- else
+ try {
+ DBTable tbl = adapter.getTable(nm.getSchema(), nm.getName());
+ if (tbl != null)
+ return loadFromDB(tbl);
+ else
+ return false;
+ } catch (ExceptionDBGitObjectNotFound exnf) {
return false;
+ }
}
public boolean loadFromDB(DBTable tbl) throws ExceptionDBGit {
@@ -126,7 +123,13 @@ public boolean loadFromDB(DBTable tbl) throws ExceptionDBGit {
@Override
public String getHash() {
- CalcHash ch = new CalcHash();
+ CalcHash ch = new CalcHash()/*{
+ @Override
+ public CalcHash addData(String str){
+ ConsoleWriter.printlnRed(str);
+ return super.addData(str);
+ }
+ }*/;
ch.addData(this.getName());
if (getTable() != null) {
@@ -142,20 +145,17 @@ public String getHash() {
}
- if (indexes != null) {
- for (String item : indexes.keySet()) {
- ch.addData(item);
- ch.addData(indexes.get(item).getHash());
+ for (String item : indexes.keySet()) {
+ if(constraints.containsKey(item)) continue;
+ ch.addData(item);
+ ch.addData(indexes.get(item).getHash());
- }
}
-
- if (constraints != null) {
- for (String item : constraints.keySet()) {
- ch.addData(item);
- ch.addData(constraints.get(item).getHash());
- }
+ for (String item : constraints.keySet()) {
+ ch.addData(item);
+ ch.addData(constraints.get(item).getHash());
+
}
return ch.calcHashStr();
@@ -213,15 +213,27 @@ public void setConstraints(Map constraints) {
this.constraints.putAll(constraints);
}
- public List getIdColumns() {
- List idColumns = new ArrayList<>();
-
+ public List getIdColumns() {
+ List idColumns = new ArrayList<>();
+
+ int i = 0;
for (DBTableField field : fields.values()) {
if (field.getIsPrimaryKey()) {
- idColumns.add(field.getName());
+ //idColumns.add(field.getName());
+ idColumns.add(i);
}
+ i++;
}
return idColumns;
}
+// private String truncateHash(String hash){
+// return hash.substring(
+// 0,
+// 2
+// ) + hash.substring(
+// hash.length() - 3,
+// hash.length() - 1
+// );
+// }
}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaTableData.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaTableData.java
index 34f4400..fa3042d 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/MetaTableData.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaTableData.java
@@ -1,374 +1,453 @@
-package ru.fusionsoft.dbgit.meta;
-
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.nio.charset.Charset;
-import java.sql.ResultSet;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.csv.CSVParser;
-import org.apache.commons.csv.CSVPrinter;
-import org.apache.commons.csv.CSVRecord;
-import org.apache.commons.csv.QuoteMode;
-
-import com.diogonunes.jcdp.color.api.Ansi.FColor;
-
-import ru.fusionsoft.dbgit.adapters.AdapterFactory;
-import ru.fusionsoft.dbgit.adapters.IDBAdapter;
-import ru.fusionsoft.dbgit.core.DBGit;
-import ru.fusionsoft.dbgit.core.DBGitConfig;
-import ru.fusionsoft.dbgit.core.DBGitLang;
-import ru.fusionsoft.dbgit.core.DBGitPath;
-import ru.fusionsoft.dbgit.core.ExceptionDBGit;
-import ru.fusionsoft.dbgit.core.ExceptionDBGitRunTime;
-import ru.fusionsoft.dbgit.core.GitMetaDataManager;
-import ru.fusionsoft.dbgit.data_table.ICellData;
-import ru.fusionsoft.dbgit.data_table.RowData;
-import ru.fusionsoft.dbgit.data_table.TreeMapRowData;
-import ru.fusionsoft.dbgit.dbobjects.DBTable;
-import ru.fusionsoft.dbgit.dbobjects.DBTableData;
-import ru.fusionsoft.dbgit.utils.CalcHash;
-import ru.fusionsoft.dbgit.utils.ConsoleWriter;
-
-/**
- * Meta class for Table data
- * @author mikle
- *
- */
-public class MetaTableData extends MetaBase {
- protected DBTable table = null;
- private DBTableData dataTable = null;
-
- private TreeMapRowData mapRows = null;
-
- public MetaTableData() {
- setDbType();
- setDbVersion();
- }
-
- public MetaTableData(DBTable tbl) throws ExceptionDBGit {
- setDbType();
- setDbVersion();
- setTable(tbl);
- }
-
-
- public DBTable getTable() {
- return table;
- }
-
- public TreeMap getmapRows() {
- return mapRows;
- }
-
- public DBTableData getDataTable() {
- return dataTable;
- }
-
- public void setMapRows(TreeMapRowData mapRows) {
- this.mapRows = mapRows;
- }
-
- public void setDataTable(DBTableData dataTable) {
- this.dataTable = dataTable;
- }
-
- public void setTable(DBTable table) throws ExceptionDBGit {
- this.table = table;
- setName(table.getSchema()+"/"+table.getName()+"."+getType().getValue());
- }
-
-
-
- @Override
- public void setName(String name) throws ExceptionDBGit {
- if (table == null) {
- NameMeta nm = MetaObjectFactory.parseMetaName(name);
- table = new DBTable();
- table.setSchema(nm.getSchema());
- table.setName(nm.getName());
- }
-
- super.setName(name);
- }
-
- @Override
- public DBGitMetaType getType() {
- return DBGitMetaType.DbGitTableData;
- }
-
- public CSVFormat getCSVFormat() {
- return CSVFormat.DEFAULT
- //.withRecordSeparator("\n")
- .withDelimiter(';')
- .withNullString("")
- .withQuote('"')
- //.withQuoteMode(QuoteMode.ALL)
- ;
- }
-
- public MetaTable getMetaTable() throws ExceptionDBGit {
- String metaTblName = table.getSchema()+"/"+table.getName()+"."+DBGitMetaType.DBGitTable.getValue();
- GitMetaDataManager gmdm = GitMetaDataManager.getInstance();
-
- IMapMetaObject dbObjs = gmdm.getCacheDBMetaData();
- MetaTable metaTable = (MetaTable) dbObjs.get(metaTblName);
- if (metaTable == null ) {
- metaTable = new MetaTable();
- metaTable.loadFromDB(table);
- }
- return metaTable;
- }
-
- public MetaTable getMetaTableFromFile() throws ExceptionDBGit {
- String metaTblName = table.getSchema()+"/"+table.getName()+"."+DBGitMetaType.DBGitTable.getValue();
- GitMetaDataManager gmdm = GitMetaDataManager.getInstance();
-
- MetaTable metaTable = (MetaTable)gmdm.loadMetaFile(metaTblName);
- if (metaTable != null)
- return metaTable;
-
- return getMetaTable();
- }
-
-
- @Override
- public boolean serialize(OutputStream stream) throws Exception {
- Integer count = 0;
- Set fields = null;
-
- if (mapRows == null) {
- return false;
- }
-
- CSVPrinter csvPrinter = new CSVPrinter(new OutputStreamWriter(stream), getCSVFormat());
-
- for (RowData rd : mapRows.values()) {
- if (count == 0) {
- fields = rd.getData().keySet();
- csvPrinter.printRecord(fields);
- }
-
- rd.saveDataToCsv(csvPrinter, getTable());
-
- count++;
- }
- csvPrinter.close();
- return true;
- }
-
- @Override
- public IMetaObject deSerialize(InputStream stream) throws Exception {
-
- MetaTable metaTable = getMetaTableFromFile();
-
- CSVParser csvParser = new CSVParser(new InputStreamReader(stream), getCSVFormat());
- List csvRecords = csvParser.getRecords();
-
- if (csvRecords.size() > 0) {
- CSVRecord titleColumns = csvRecords.get(0);
-
- mapRows = new TreeMapRowData();
-
- for (int i = 1; i < csvRecords.size(); i++) {
- RowData rd = new RowData(csvRecords.get(i), metaTable, titleColumns);
- mapRows.put(rd);
- }
- }
-
- csvParser.close();
-
- //saveToFile("test");
-
- return this;
- }
-
- public boolean loadPortionFromDB(int currentPortionIndex) throws ExceptionDBGit {
- return loadPortionFromDB(currentPortionIndex, 0);
- }
-
- public boolean loadPortionFromDB(int currentPortionIndex, int tryNumber) throws ExceptionDBGit {
- try {
- IDBAdapter adapter = AdapterFactory.createAdapter();
- MetaTable metaTable = getMetaTable();
- if (metaTable.getFields().size() == 0)
- return false;
-
- dataTable = adapter.getTableDataPortion(table.getSchema(), table.getName(), currentPortionIndex, 0);
-
- ResultSet rs = dataTable.getResultSet();
-
- if (dataTable.getErrorFlag() > 0) {
- ConsoleWriter.printlnColor(DBGitLang.getInstance().getValue("errors", "meta", "tooManyRecords").
- withParams(getName(), String.valueOf(IDBAdapter.MAX_ROW_COUNT_FETCH)), FColor.RED, 0);
- return false;
- }
-
- mapRows = new TreeMapRowData();
-
- while(rs.next()){
- RowData rd = new RowData(rs, metaTable);
- mapRows.put(rd);
- }
- return true;
- } catch (Exception e) {
- e.printStackTrace();
- ConsoleWriter.println(e.getMessage());
- ConsoleWriter.println(e.getLocalizedMessage());
-
- try {
- if (tryNumber <= DBGitConfig.getInstance().getInteger("core", "TRY_COUNT", DBGitConfig.getInstance().getIntegerGlobal("core", "TRY_COUNT", 1000))) {
- try {
- TimeUnit.SECONDS.sleep(DBGitConfig.getInstance().getInteger("core", "TRY_DELAY", DBGitConfig.getInstance().getIntegerGlobal("core", "TRY_DELAY", 1000)));
- } catch (InterruptedException e1) {
- throw new ExceptionDBGitRunTime(e1.getMessage());
- }
- ConsoleWriter.println("Error while getting portion of data, try " + tryNumber);
- return loadPortionFromDB(currentPortionIndex, tryNumber++);
- }
- } catch (Exception e1) {
- // TODO Auto-generated catch block
- e1.printStackTrace();
- }
-
- if (e instanceof ExceptionDBGit)
- throw (ExceptionDBGit)e;
- throw new ExceptionDBGit(e);
- }
- }
-
- @Override
- public boolean loadFromDB() throws ExceptionDBGit {
- try {
- IDBAdapter adapter = AdapterFactory.createAdapter();
-
- MetaTable metaTable = getMetaTable();
-
- if (metaTable.getFields().size() == 0)
- return false;
-
- List idColumns = metaTable.getIdColumns();
-
- dataTable = adapter.getTableData(table.getSchema(), table.getName());
-
- if (dataTable.getErrorFlag() > 0) {
- ConsoleWriter.printlnColor(DBGitLang.getInstance().getValue("errors", "meta", "tooManyRecords").
- withParams(getName(), String.valueOf(IDBAdapter.MAX_ROW_COUNT_FETCH)), FColor.RED, 0);
- return false;
- }
-
- ResultSet rs = dataTable.getResultSet();
-
- mapRows = new TreeMapRowData();
-
- //System.out.println("load from db file "+getName());
- while(rs.next()){
- RowData rd = new RowData(rs, metaTable);
- mapRows.put(rd);
- }
- return true;
- /*
- System.out.println("******************************************");
- System.out.println();
- */
- } catch (Exception e) {
- e.printStackTrace();
- if (e instanceof ExceptionDBGit)
- throw (ExceptionDBGit)e;
- throw new ExceptionDBGit(e);
- }
-
- }
-
- public void diff(MetaTableData ob) throws Exception {
- if (mapRows.size() != ob.mapRows.size()) {
- System.out.println(DBGitLang.getInstance().getValue("general", "meta", "diffSize1").withParams(String.valueOf(mapRows.size()), String.valueOf(ob.mapRows.size())));
- }
- for (String rowHash : mapRows.keySet()) {
- RowData r1 = mapRows.get(rowHash);
- RowData r2 = ob.mapRows.get(rowHash);
-
- System.out.println(rowHash);
- System.out.println(r1.getData()+ " "+ r2.getData());
-
- if (r1.getData().size() != r2.getData().size()) {
- System.out.println(DBGitLang.getInstance().getValue("general", "meta", "diffSize2").withParams(rowHash));
- }
-
- for (String col : r1.getData().keySet()) {
- String d1 = r1.getData().get(col).convertToString();
- String d2 = r2.getData().get(col).convertToString();
-
- if (d1 != d2) {
- if (!d1.equals(r2.getData().get(col))) {
- System.out.println(DBGitLang.getInstance().getValue("general", "meta", "diffDataRow").
- withParams(rowHash, col, r1.getData().get(col).toString(), r2.getData().get(col).toString()));
- }
- }
- }
- }
- }
-
-
- @Override
- public String getHash() {
- CalcHash ch = new CalcHash();
- if (mapRows == null)
- return EMPTY_HASH;
-
- if (mapRows.size() == 0)
- return EMPTY_HASH;
-
- //System.out.println(getName());
- int n = 0;
- for (RowData rd : mapRows.values()) {
- ch.addData(rd.getHashRow());
- //System.out.println("row "+n+" "+rd.getHashRow());
- n++;
- }
-
- return ch.calcHashStr();
- }
-
- @Override
- public int addToGit() throws ExceptionDBGit {
- int count = super.addToGit();
-
- if (mapRows == null) return count;
-
- for (RowData rd : mapRows.values()) {
- for (ICellData cd : rd.getData().values()) {
- count += cd.addToGit();
- }
- }
-
- return count;
- }
-
- @Override
- public int removeFromGit() throws ExceptionDBGit {
- int count = super.removeFromGit();
-
- if (mapRows == null)
- return 1;
-
- for (RowData rd : mapRows.values()) {
- for (ICellData cd : rd.getData().values()) {
- count += cd.removeFromGit();
- }
- }
-
- return count;
- }
-
-}
+package ru.fusionsoft.dbgit.meta;
+
+import java.io.*;
+import java.nio.charset.StandardCharsets;
+import java.sql.ResultSet;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
+
+import de.siegmar.fastcsv.reader.CsvParser;
+import de.siegmar.fastcsv.reader.CsvReader;
+import de.siegmar.fastcsv.reader.CsvRow;
+
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVPrinter;
+import org.apache.commons.csv.CSVRecord;
+
+import com.diogonunes.jcdp.color.api.Ansi.FColor;
+
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.slf4j.Logger;
+import ru.fusionsoft.dbgit.adapters.AdapterFactory;
+import ru.fusionsoft.dbgit.adapters.IDBAdapter;
+import ru.fusionsoft.dbgit.core.DBGitConfig;
+import ru.fusionsoft.dbgit.core.DBGitLang;
+import ru.fusionsoft.dbgit.core.ExceptionDBGit;
+import ru.fusionsoft.dbgit.core.ExceptionDBGitRunTime;
+import ru.fusionsoft.dbgit.core.GitMetaDataManager;
+import ru.fusionsoft.dbgit.data_table.ICellData;
+import ru.fusionsoft.dbgit.data_table.MapFileData;
+import ru.fusionsoft.dbgit.data_table.RowData;
+import ru.fusionsoft.dbgit.data_table.TreeMapRowData;
+import ru.fusionsoft.dbgit.dbobjects.DBTable;
+import ru.fusionsoft.dbgit.dbobjects.DBTableData;
+import ru.fusionsoft.dbgit.utils.CalcHash;
+import ru.fusionsoft.dbgit.utils.ConsoleWriter;
+import ru.fusionsoft.dbgit.utils.LoggerUtil;
+
+/**
+ * Meta class for Table data
+ * @author mikle
+ *
+ */
+public class MetaTableData extends MetaBase {
+ private Logger logger = LoggerUtil.getLogger(this.getClass());
+ protected DBTable table = null;
+ private DBTableData dataTable = null;
+
+ private TreeMapRowData mapRows = null;
+ private List fields = new ArrayList<>();
+
+ public MetaTableData() {
+ setDbType();
+ setDbVersion();
+ }
+
+ public MetaTableData(DBTable tbl) throws ExceptionDBGit {
+ setDbType();
+ setDbVersion();
+ setTable(tbl);
+ }
+
+
+ public DBTable getTable() {
+ return table;
+ }
+
+ public TreeMap getmapRows() {
+ return mapRows;
+ }
+
+ public DBTableData getDataTable() {
+ return dataTable;
+ }
+
+ public void setMapRows(TreeMapRowData mapRows) {
+ this.mapRows = mapRows;
+ }
+
+ public void setDataTable(DBTableData dataTable) {
+ this.dataTable = dataTable;
+ }
+
+ public void setTable(DBTable table) throws ExceptionDBGit {
+ this.table = table;
+ setName(table.getSchema()+"/"+table.getName()+"."+getType().getValue());
+ }
+
+ public void setFields(List fields) {
+ this.fields = fields;
+ }
+
+ @Override
+ public void setName(String name) throws ExceptionDBGit {
+ if (table == null) {
+ NameMeta nm = MetaObjectFactory.parseMetaName(name);
+ table = new DBTable.OnlyNameDBTable(nm.getName(), nm.getSchema());
+ }
+
+ super.setName(name);
+ }
+
+ @Override
+ public DBGitMetaType getType() {
+ return DBGitMetaType.DbGitTableData;
+ }
+
+ public CSVFormat getCSVFormat() {
+ return CSVFormat.DEFAULT
+ //.withRecordSeparator("\n")
+ .withDelimiter(';')
+ .withNullString("")
+ .withQuote('"')
+ //.withQuoteMode(QuoteMode.ALL)
+ ;
+ }
+
+ public MetaTable getMetaTable() throws ExceptionDBGit {
+ String metaTblName = table.getSchema()+"/"+table.getName()+"."+DBGitMetaType.DBGitTable.getValue();
+ GitMetaDataManager gmdm = GitMetaDataManager.getInstance();
+
+ IMapMetaObject dbObjs = gmdm.getCacheDBMetaData();
+ MetaTable metaTable = (MetaTable) dbObjs.get(metaTblName);
+ if (metaTable == null ) {
+ metaTable = new MetaTable();
+ metaTable.loadFromDB(table);
+ }
+ return metaTable;
+ }
+
+ public MetaTable getMetaTableFromFile() throws ExceptionDBGit {
+ String metaTblName = table.getSchema()+"/"+table.getName()+"."+DBGitMetaType.DBGitTable.getValue();
+ GitMetaDataManager gmdm = GitMetaDataManager.getInstance();
+
+ MetaTable metaTable = (MetaTable)gmdm.loadMetaFile(metaTblName);
+ if (metaTable != null)
+ return metaTable;
+
+ //TODO ... which is not from file, but from db
+ return getMetaTable();
+ }
+
+
+ @Override
+ public boolean serialize(OutputStream stream) throws Exception {
+ Integer count = 0;
+ Set fields = null;
+
+ if (mapRows == null) {
+ return false;
+ }
+
+ CSVPrinter csvPrinter = new CSVPrinter(new OutputStreamWriter(stream), getCSVFormat());
+
+ for (RowData rd : mapRows.values()) {
+ if (count == 0) {
+ fields = rd.getData(this.fields).keySet();
+ csvPrinter.printRecord(fields);
+ }
+
+ rd.saveDataToCsv(csvPrinter, getTable());
+
+ count++;
+ }
+ csvPrinter.close();
+ return true;
+ }
+
+ @Override
+ public IMetaObject deSerialize(File file) throws Exception {
+ MetaTable metaTable = getMetaTableFromFile();
+
+ CsvReader csvReader = new CsvReader();
+ csvReader.setFieldSeparator(';');
+ csvReader.setContainsHeader(false);
+ int i = 1;
+
+ try (CsvParser csvParser = csvReader.parse(file, StandardCharsets.UTF_8)) {
+ CsvRow row;
+ boolean flag = false;
+ mapRows = new TreeMapRowData();
+ CsvRow titleColumns = null;
+
+
+ while ((row = csvParser.nextRow()) != null) {
+ if (!flag) {
+ titleColumns = row;
+ fields = row.getFields();
+// System.err.println("fields = " + fields);
+ } else {
+ RowData rd = new RowData(row, metaTable, titleColumns);
+ mapRows.put(rd);
+ i++;
+ }
+ flag = true;
+ }
+ } catch (Throwable ex){
+ ConsoleWriter.detailsPrint(DBGitLang.getInstance().getValue("general", "meta", "loadRow").withParams(String.valueOf(i) ));
+ warnFilesNotFound();
+ throw ex;
+ }
+ ConsoleWriter.detailsPrint(DBGitLang.getInstance().getValue("general", "meta", "loadedRow").withParams(String.valueOf(i) ));
+ warnFilesNotFound();
+
+ return this;
+ }
+
+
+ @Override
+ @Deprecated
+ public IMetaObject deSerialize(InputStream stream) throws Exception {
+
+ MetaTable metaTable = getMetaTableFromFile();
+
+ CSVParser csvParser = new CSVParser(new InputStreamReader(stream), getCSVFormat());
+ List csvRecords = csvParser.getRecords();
+
+ if (csvRecords.size() > 0) {
+ CSVRecord titleColumns = csvRecords.get(0);
+ fields.clear();
+ for (int i = 0; i < csvRecords.get(0).size(); i++) {
+ fields.add(csvRecords.get(0).get(i));
+ }
+
+ mapRows = new TreeMapRowData();
+
+ for (int i = 1; i < csvRecords.size(); i++) {
+ RowData rd = new RowData(csvRecords.get(i), metaTable, titleColumns);
+ mapRows.put(rd);
+ }
+ }
+
+
+ csvParser.close();
+
+ //saveToFile("test");
+
+ return this;
+ }
+
+ public boolean loadPortionFromDB(int currentPortionIndex) throws ExceptionDBGit {
+ return loadPortionFromDB(currentPortionIndex, 0);
+ }
+
+ public boolean loadPortionFromDB(int currentPortionIndex, int tryNumber) throws ExceptionDBGit {
+ try {
+ IDBAdapter adapter = AdapterFactory.createAdapter();
+ MetaTable metaTable = getMetaTable();
+ if (metaTable.getFields().size() == 0)
+ return false;
+
+ dataTable = adapter.getTableDataPortion(table.getSchema(), table.getName(), currentPortionIndex, 0);
+
+ ResultSet rs = dataTable.resultSet();
+
+ if (dataTable.errorFlag() > 0) {
+ final String tooManyRecordsMsg = DBGitLang.getInstance().getValue("errors", "meta", "tooManyRecords").withParams(getName(), String.valueOf(IDBAdapter.MAX_ROW_COUNT_FETCH));
+ ConsoleWriter.printlnColor(tooManyRecordsMsg, FColor.RED, 0);
+ return false;
+ }
+
+ mapRows = new TreeMapRowData();
+
+ boolean flag = false;
+ while(rs.next()){
+
+ if (!flag) {
+ fields.clear();
+ for (int i = 0; i < rs.getMetaData().getColumnCount(); i++) {
+ String columnName = rs.getMetaData().getColumnName(i + 1);
+ if (columnName.equalsIgnoreCase("DBGIT_ROW_NUM"))
+ continue;
+ fields.add(columnName);
+ }
+ }
+
+ flag = true;
+ RowData rd = new RowData(rs, metaTable);
+ mapRows.put(rd);
+ }
+
+
+
+ return true;
+ } catch (Exception e) {
+
+ ConsoleWriter.println(e.getLocalizedMessage(), messageLevel);
+ ConsoleWriter.detailsPrintln(ExceptionUtils.getStackTrace(e), messageLevel);
+ logger.error(DBGitLang.getInstance().getValue("errors", "adapter", "tableData").toString(), e);
+
+ try {
+ if (tryNumber <= DBGitConfig.getInstance().getInteger("core", "TRY_COUNT", DBGitConfig.getInstance().getIntegerGlobal("core", "TRY_COUNT", 1000))) {
+ try {
+ TimeUnit.SECONDS.sleep(DBGitConfig.getInstance().getInteger("core", "TRY_DELAY", DBGitConfig.getInstance().getIntegerGlobal("core", "TRY_DELAY", 1000)));
+ } catch (InterruptedException e1) {
+ throw new ExceptionDBGitRunTime(e1.getMessage());
+ }
+ ConsoleWriter.println(DBGitLang.getInstance()
+ .getValue("errors", "dataTable", "tryAgain")
+ .withParams(String.valueOf(tryNumber))
+ , messageLevel
+ );
+ return loadPortionFromDB(currentPortionIndex, tryNumber++);
+ }
+ } catch (Exception e1) {
+ throw new ExceptionDBGitRunTime(e1);
+ // TODO Auto-generated catch block
+// e1.printStackTrace();
+ }
+
+ if (e instanceof ExceptionDBGit) throw (ExceptionDBGit)e;
+ throw new ExceptionDBGit(e);
+ }
+ }
+
+ @Override
+ public boolean loadFromDB() throws ExceptionDBGit {
+ try {
+ IDBAdapter adapter = AdapterFactory.createAdapter();
+
+ MetaTable metaTable = getMetaTable();
+
+ if (metaTable.getFields().size() == 0)
+ return false;
+
+ dataTable = adapter.getTableData(table.getSchema(), table.getName());
+
+ if (dataTable.errorFlag() > 0) {
+ ConsoleWriter.printlnColor(DBGitLang.getInstance().getValue("errors", "meta", "tooManyRecords").
+ withParams(getName(), String.valueOf(IDBAdapter.MAX_ROW_COUNT_FETCH)), FColor.RED, 0);
+ return false;
+ }
+
+ ResultSet rs = dataTable.resultSet();
+
+ mapRows = new TreeMapRowData();
+
+ //System.out.println("load from db file "+getName());
+ while(rs.next()){
+ RowData rd = new RowData(rs, metaTable);
+ mapRows.put(rd);
+ }
+ return true;
+ /*
+ System.out.println("******************************************");
+ System.out.println();
+ */
+ } catch (Exception e) {
+ throw new ExceptionDBGit("Error loading table data from DB", e);
+ }
+
+ }
+
+ public void diff(MetaTableData ob) throws Exception {
+ if (mapRows.size() != ob.mapRows.size()) {
+ System.out.println(DBGitLang.getInstance().getValue("general", "meta", "diffSize1").withParams(String.valueOf(mapRows.size()), String.valueOf(ob.mapRows.size())));
+ }
+ for (String rowHash : mapRows.keySet()) {
+ RowData r1 = mapRows.get(rowHash);
+ RowData r2 = ob.mapRows.get(rowHash);
+
+ System.out.println(rowHash);
+ System.out.println(r1.getData(fields)+ " "+ r2.getData(ob.fields));
+
+ if (r1.getData(fields).size() != r2.getData(ob.fields).size()) {
+ System.out.println(DBGitLang.getInstance().getValue("general", "meta", "diffSize2").withParams(rowHash));
+ }
+
+ for (String col : r1.getData(fields).keySet()) {
+ String d1 = r1.getData(fields).get(col).convertToString();
+ String d2 = r2.getData(ob.fields).get(col).convertToString();
+
+ if (d1 != d2) {
+ if (!d1.equals(r2.getData(ob.fields).get(col))) {
+ System.out.println(DBGitLang.getInstance().getValue("general", "meta", "diffDataRow").
+ withParams(rowHash, col, r1.getData(fields).get(col).toString(), r2.getData(ob.fields).get(col).toString()));
+ }
+ }
+ }
+ }
+ }
+
+
+ @Override
+ public String getHash() {
+ CalcHash ch = new CalcHash();
+ if (mapRows == null)
+ return EMPTY_HASH;
+
+ if (mapRows.size() == 0)
+ return EMPTY_HASH;
+
+ //System.out.println(getName());
+ int n = 0;
+ for (RowData rd : mapRows.values()) {
+ ch.addData(rd.getHashRow());
+ //System.out.println("row "+n+" "+rd.getHashRow());
+ n++;
+ }
+
+ return ch.calcHashStr();
+ }
+
+ @Override
+ public int addToGit() throws ExceptionDBGit {
+ int count = super.addToGit();
+
+ if (mapRows == null) return count;
+
+ for (RowData rd : mapRows.values()) {
+ for (ICellData cd : rd.getData(fields).values()) {
+ count += cd.addToGit();
+ }
+ }
+
+ return count;
+ }
+
+ @Override
+ public int removeFromGit() throws ExceptionDBGit {
+ int count = super.removeFromGit();
+
+ if (mapRows == null)
+ return 1;
+
+ for (RowData rd : mapRows.values()) {
+ for (ICellData cd : rd.getData(fields).values()) {
+ count += cd.removeFromGit();
+ }
+ }
+
+ return count;
+ }
+
+ public List getFields() {
+ return fields;
+ }
+
+ private void warnFilesNotFound(){
+ Set filesNotFound = MapFileData.getFilesNotFound();
+ if(filesNotFound != null && filesNotFound.size() > 0){
+ ConsoleWriter.detailsPrintlnColor(DBGitLang.getInstance().getValue("errors", "dataTable", "filesNotFound")
+ .withParams(String.join(";", filesNotFound))
+ , FColor.YELLOW, messageLevel
+ );
+ filesNotFound.clear();
+ }
+
+ }
+}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/MetaUDT.java b/src/main/java/ru/fusionsoft/dbgit/meta/MetaUDT.java
new file mode 100644
index 0000000..014fd0d
--- /dev/null
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/MetaUDT.java
@@ -0,0 +1,40 @@
+package ru.fusionsoft.dbgit.meta;
+
+import ru.fusionsoft.dbgit.adapters.AdapterFactory;
+import ru.fusionsoft.dbgit.adapters.IDBAdapter;
+import ru.fusionsoft.dbgit.core.ExceptionDBGit;
+import ru.fusionsoft.dbgit.dbobjects.DBSQLObject;
+import ru.fusionsoft.dbgit.dbobjects.DBUserDefinedType;
+
+public class MetaUDT extends MetaSql {
+ public MetaUDT() {
+ }
+
+ public MetaUDT(DBSQLObject sqlObject) throws ExceptionDBGit {
+ super(sqlObject);
+ }
+
+ /**
+ * @return Type meta object
+ */
+ @Override
+ public final IDBGitMetaType getType() {
+ return DBGitMetaType.DBGitUserDefinedType;
+ }
+
+ /**
+ * load current object from DB
+ */
+ @Override
+ public final boolean loadFromDB() throws ExceptionDBGit {
+ final IDBAdapter adapter = AdapterFactory.createAdapter();
+ final NameMeta nm = MetaObjectFactory.parseMetaName(name);
+ final DBUserDefinedType dbObject = adapter.getUDT(nm.getSchema(), nm.getName());
+
+ if (dbObject != null) {
+ setSqlObject(dbObject);
+ return true;
+ } else
+ return false;
+ }
+}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/SortedListMetaObject.java b/src/main/java/ru/fusionsoft/dbgit/meta/SortedListMetaObject.java
index de3c517..16fd538 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/SortedListMetaObject.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/SortedListMetaObject.java
@@ -8,7 +8,7 @@
import ru.fusionsoft.dbgit.dbobjects.DBTable;
import ru.fusionsoft.dbgit.utils.ConsoleWriter;
-import java.sql.Timestamp;
+
import java.util.*;
import java.util.stream.Collectors;
@@ -28,23 +28,42 @@ public SortedListMetaObject(Collection fromCollection){
calculateImoCrossDependencies();
}
+ public Collection getCollection(){
+ return collection;
+}
+ public List sortFromDependencies() throws ExceptionDBGit {
+ if (listFromDependant == null) {
+ listFromDependant = createSortedList(false);
+ }
+ return listFromDependant;
+
+ }
+ public List sortFromReferenced() throws ExceptionDBGit {
+ if (listFromFree == null) {
+ listFromFree = createSortedList(true);
+ }
+ return listFromFree;
+ }
+
private void calculateImoCrossDependencies(){
- Timestamp timestampBefore = new Timestamp(System.currentTimeMillis());
for(DBGitMetaType metaType : Sets.newHashSet(DBGitMetaType.DBGitTable, DBGitMetaType.DbGitFunction)){
- List objectsOfType = collection.stream().filter(x->x.getType().equals(metaType) ).collect(Collectors.toList());
+ List objectsOfType = collection.stream()
+ .filter( x->x.getType().equals(metaType) )
+ .collect(Collectors.toList());
+
+
Map realNamesToMetaNames = objectsOfType.stream().collect(Collectors.toMap(
- x->x.getUnderlyingDbObject().getSchema() + "." + x.getUnderlyingDbObject().getName(),
- IMetaObject::getName
- )
- );
+ x-> x.getUnderlyingDbObject().getSchema() + "." + x.getUnderlyingDbObject().getName(),
+ IMetaObject::getName
+ ));
for(IMetaObject imo : objectsOfType){
if(imo.getType().equals(DBGitMetaType.DbGitFunction)){
DBSQLObject dbsql = (DBSQLObject) imo.getUnderlyingDbObject();
Set deps = realNamesToMetaNames.keySet().stream()
- .filter( x -> dbsql.getSql().contains(x) && !(dbsql.getSchema()+"."+dbsql.getName()).equals(x) )
+ .filter( x -> dbsql.getSql().contains(x) /*&& !(dbsql.getSchema()+"."+dbsql.getName()).equals(x)*/ )
.map(realNamesToMetaNames::get)
.collect(Collectors.toSet());
dbsql.setDependencies(deps);
@@ -52,111 +71,77 @@ private void calculateImoCrossDependencies(){
if(imo.getType().equals(DBGitMetaType.DBGitTable)){
DBTable dbTable = (DBTable) imo.getUnderlyingDbObject();
Set deps = realNamesToMetaNames.values().stream()
- .filter( x -> dbTable.getDependencies().contains(x) && !x.equals(imo.getName()) )
- .collect(Collectors.toSet());
- dbTable.setDependencies(deps);
+ .filter( x -> dbTable.getDependencies().contains(x) /*&& !x.equals(imo.getName())*/ )
+ .collect(Collectors.toSet());
+ dbTable.getDependencies().addAll(deps);
}
}
}
- Timestamp timestampAfter = new Timestamp(System.currentTimeMillis());
- Long diff = timestampAfter.getTime() - timestampBefore.getTime();
- ConsoleWriter.detailsPrintlnGreen(DBGitLang.getInstance().getValue("general", "time").withParams(diff.toString()));
- };
-
- public List sortFromDependant() throws ExceptionDBGit {
- if (listFromDependant == null) {
- listFromDependant = new ArrayList<>();
- List types = Arrays
- .stream(DBGitMetaType.values())
- .sorted(Comparator.comparing(DBGitMetaType::getPriority).reversed())
- .collect(Collectors.toList());
-
- for (DBGitMetaType tp : types) {
-
- List objectsOfType = collection.stream().filter(x -> x.getType().equals(tp)).collect(Collectors.toList());
- if (!objectsOfType.isEmpty()) {
- if (tp.equals(DBGitMetaType.DBGitTable) || (objectsOfType.get(0) instanceof MetaSql)) {
+ }
- Set namesAllOfType = objectsOfType.stream().map(IMetaObject::getName).collect(Collectors.toSet());
- List objectsL0 = objectsOfType.stream()
- .filter(x -> x.getUnderlyingDbObject().getDependencies().size() == 0)
+ public List createSortedList(boolean isSortedFromFree) throws ExceptionDBGit {
+ List list = new ArrayList<>();
+ Comparator typeComparator = isSortedFromFree
+ ? Comparator.comparing(DBGitMetaType::getPriority)
+ : Comparator.comparing(DBGitMetaType::getPriority).reversed();
+ Comparator imoComparator = isSortedFromFree
+ ? imoDependenceComparator
+ : imoDependenceComparator.reversed();
+
+ List types = Arrays
+ .stream(DBGitMetaType.values())
+ .sorted(typeComparator)
+ .collect(Collectors.toList());
+
+ for (DBGitMetaType tp : types) {
+ List objectsOfType = collection.stream().filter(x -> x.getType().equals(tp)).collect(Collectors.toList());
+ if (!objectsOfType.isEmpty()) {
+ if (tp.equals(DBGitMetaType.DBGitTable) || objectsOfType.get(0) instanceof MetaSql) {
+ Set namesAllOfType = objectsOfType.stream().map(IMetaObject::getName).collect(Collectors.toSet());
+ List objectsL0 = objectsOfType.stream()
+ .filter(x -> {
+ Set deps = x.getUnderlyingDbObject().getDependencies();
+ return deps.size() == 0 || ( deps.size() == 1 && deps.contains(x.getName()) );
+ })
+ .collect(Collectors.toList());
+
+ objectsOfType.removeAll(objectsL0);
+ while (!objectsOfType.isEmpty()) {
+ Set namesL0 = objectsL0.stream().map(IMetaObject::getName).collect(Collectors.toSet());
+ List objectsL1 = objectsOfType
+ .stream()
+ .filter(x -> {
+ Set actualDeps = new HashSet<>(x.getUnderlyingDbObject().getDependencies());
+ actualDeps.retainAll(namesAllOfType); //only deps of same type
+ actualDeps.remove(x.getName());
+ return namesL0.containsAll(actualDeps);
+ })
+ .sorted(imoComparator)
.collect(Collectors.toList());
-
- objectsOfType.removeAll(objectsL0);
- while (!objectsOfType.isEmpty()) {
- Set namesL0 = objectsL0.stream().map(IMetaObject::getName).collect(Collectors.toSet());
- List objectsL1 = objectsOfType
- .stream()
- .filter(x -> {
- Set actualDeps = new HashSet<>(x.getUnderlyingDbObject().getDependencies());
- actualDeps.retainAll(namesAllOfType);
- return namesL0.containsAll(actualDeps);
- })
- .sorted(imoDependenceComparator.reversed())
- .collect(Collectors.toList());
- if (objectsL1.isEmpty()) {
- warnNotAdded(objectsOfType);
- throw new ExceptionDBGit("infinite loop");
- }
- objectsOfType.removeAll(objectsL1);
- objectsL0.addAll(0, objectsL1);
+ if (objectsL1.isEmpty()) {
+ warnNotAdded(objectsOfType);
+ throw new ExceptionDBGit("infinite loop");
}
- listFromDependant.addAll(objectsL0);
- } else {
- listFromDependant.addAll(objectsOfType);
+ objectsOfType.removeAll(objectsL1);
+ if(isSortedFromFree) { objectsL0.addAll(objectsL1); }
+ else { objectsL0.addAll(0, objectsL1); }
}
+ list.addAll(objectsL0);
+ } else {
+ list.addAll(objectsOfType);
}
}
}
- return listFromDependant;
-
- };
- public List sortFromFree() throws ExceptionDBGit {
- if (listFromFree == null) {
- listFromFree = new ArrayList<>();
- List types = Arrays.stream(DBGitMetaType.values())
- .sorted(Comparator.comparing(DBGitMetaType::getPriority))
- .collect(Collectors.toList());
-
- for (DBGitMetaType tp : types) {
- List objectsOfType = collection.stream().filter(x -> x.getType().equals(tp)).collect(Collectors.toList());
- if (!objectsOfType.isEmpty()) {
- if (tp.equals(DBGitMetaType.DBGitTable) || objectsOfType.get(0) instanceof MetaSql) {
- Set namesAllOfType = objectsOfType.stream().map(IMetaObject::getName).collect(Collectors.toSet());
- List objectsL0 = objectsOfType.stream().filter(x -> x.getUnderlyingDbObject().getDependencies().size() == 0).collect(Collectors.toList());
-
- objectsOfType.removeAll(objectsL0);
- while (!objectsOfType.isEmpty()) {
- Set namesL0 = objectsL0.stream().map(IMetaObject::getName).collect(Collectors.toSet());
- List objectsL1 = objectsOfType
- .stream()
- .filter(x -> {
- Set actualDeps = new HashSet<>(x.getUnderlyingDbObject().getDependencies());
- actualDeps.retainAll(namesAllOfType);
- return namesL0.containsAll(actualDeps);
- })
- .sorted(imoDependenceComparator)
- .collect(Collectors.toList());
- if (objectsL1.isEmpty()) {
- warnNotAdded(objectsOfType);
- throw new ExceptionDBGit("infinite loop");
- }
- objectsOfType.removeAll(objectsL1);
- objectsL0.addAll(objectsL1);
- }
- listFromFree.addAll(objectsL0);
- } else {
- listFromFree.addAll(objectsOfType);
- }
- }
+// int i = 0;
+// for(IMetaObject imo : list){
+// ConsoleWriter.printlnRed(MessageFormat.format("{0}. {1}", i++, imo.getName()));
+// }
+ return list;
+ }
- }
- }
- return listFromFree;
- };
public static Comparator imoTypeComparator = Comparator.comparing(x->x.getType().getPriority());
public static Comparator imoDependenceComparator = (o1, o2) -> {
@@ -178,8 +163,11 @@ public List sortFromFree() throws ExceptionDBGit {
};
public void warnNotAdded(List remained){
- ConsoleWriter.detailsPrintlnRed("There were objects with unsatisfied dependencies, " +
- "they will NOT be included in restore list!\n");
+ ConsoleWriter.printlnRed(DBGitLang.getInstance()
+ .getValue("errors", "unsatisfiedDependencies")
+ , 1
+ );
+
remained.forEach( x -> ConsoleWriter.printlnColor(x.getName(), Ansi.FColor.MAGENTA, 1) );
}
diff --git a/src/main/java/ru/fusionsoft/dbgit/meta/TreeMapMetaObject.java b/src/main/java/ru/fusionsoft/dbgit/meta/TreeMapMetaObject.java
index f417e35..d167b3d 100644
--- a/src/main/java/ru/fusionsoft/dbgit/meta/TreeMapMetaObject.java
+++ b/src/main/java/ru/fusionsoft/dbgit/meta/TreeMapMetaObject.java
@@ -36,7 +36,7 @@ public int compare(String nm1, String nm2) {
}
- public TreeMapMetaObject(List from){
+ public TreeMapMetaObject(Collection from){
this();
this.putAll(from.stream().collect(Collectors.toMap(IMetaObject::getName, key->key)));
}
diff --git a/src/main/java/ru/fusionsoft/dbgit/mssql/DBAdapterMssql.java b/src/main/java/ru/fusionsoft/dbgit/mssql/DBAdapterMssql.java
index 2d0c983..f33d99d 100644
--- a/src/main/java/ru/fusionsoft/dbgit/mssql/DBAdapterMssql.java
+++ b/src/main/java/ru/fusionsoft/dbgit/mssql/DBAdapterMssql.java
@@ -1,21 +1,23 @@
package ru.fusionsoft.dbgit.mssql;
+import com.google.common.collect.ImmutableMap;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.slf4j.Logger;
import ru.fusionsoft.dbgit.adapters.DBAdapter;
import ru.fusionsoft.dbgit.adapters.IFactoryDBAdapterRestoteMetaData;
import ru.fusionsoft.dbgit.adapters.IFactoryDBBackupAdapter;
import ru.fusionsoft.dbgit.adapters.IFactoryDBConvertAdapter;
-import ru.fusionsoft.dbgit.core.DBGitConfig;
-import ru.fusionsoft.dbgit.core.ExceptionDBGit;
-import ru.fusionsoft.dbgit.core.ExceptionDBGitRunTime;
+import ru.fusionsoft.dbgit.core.*;
import ru.fusionsoft.dbgit.core.db.DbType;
import ru.fusionsoft.dbgit.core.db.FieldType;
import ru.fusionsoft.dbgit.data_table.*;
import ru.fusionsoft.dbgit.dbobjects.*;
import ru.fusionsoft.dbgit.meta.IMapMetaObject;
+import ru.fusionsoft.dbgit.meta.TreeMapMetaObject;
import ru.fusionsoft.dbgit.statement.StatementLogging;
import ru.fusionsoft.dbgit.utils.ConsoleWriter;
import ru.fusionsoft.dbgit.utils.LoggerUtil;
+import ru.fusionsoft.dbgit.utils.StringProperties;
import java.sql.*;
import java.util.*;
@@ -26,18 +28,18 @@ public class DBAdapterMssql extends DBAdapter {
public static final String DEFAULT_MAPPING_TYPE = "varchar";
private static final HashSet systemSchemas = new HashSet<>(Arrays.asList(
- "db_denydatawriter",
- "db_datawriter",
- "db_accessadmin",
- "db_ddladmin",
- "db_securityadmin",
- "db_denydatareader",
- "db_backupoperator",
- "db_datareader",
- "db_owner",
- "sys",
- "INFORMATION_SCHEMA"
- ));
+ "db_denydatawriter",
+ "db_datawriter",
+ "db_accessadmin",
+ "db_ddladmin",
+ "db_securityadmin",
+ "db_denydatareader",
+ "db_backupoperator",
+ "db_datareader",
+ "db_owner",
+ "sys",
+ "INFORMATION_SCHEMA"
+ ));
//Stubs for MSSQL adapter, marked as "TODO Auto-generated method stub"
//And some unfinished implementations marked as "TODO MSSQL *"
@@ -64,33 +66,30 @@ public void endUpdateDB() {
@Override
public IMapMetaObject loadCustomMetaObjects() {
- // TODO Auto-generated method stub
- return null;
+ return new TreeMapMetaObject(Collections.emptyList());
}
@Override
public Map getSchemes() {
- Map listScheme = new HashMap();
- try {
- Connection connect = getConnection();
- DatabaseMetaData meta = connect.getMetaData();
- ResultSet rs = meta.getSchemas();
+ final Map listScheme = new HashMap<>();
+ try (ResultSet rs = getConnection().getMetaData().getSchemas()){
+
// made without query
// Statement stmt = connect.createStatement();
// ResultSet rs = stmt.executeQuery(query);
while(rs.next()){
+ final String name = rs.getString("TABLE_SCHEM");
+
// May also get catalog names that belong to scheme as "TABLE_CATALOG"
- String name = rs.getString("TABLE_SCHEM");
if(!systemSchemas.contains(name)) {
- DBSchema scheme = new DBSchema(name);
- rowToProperties(rs, scheme.getOptions());
+ final DBSchema scheme = new DBSchema(name, new StringProperties(rs));
listScheme.put(name, scheme);
}
}
- //stmt.close();
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "schemes").toString(), e);
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "schemes").toString(), e);
+
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "schemes").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
return listScheme;
@@ -98,8 +97,8 @@ public Map getSchemes() {
@Override
public Map getTableSpaces() {
-
- String query = "SELECT \n" +
+ final Map listTableSpace = new HashMap<>();
+ final String query = "SELECT \n" +
"[SFG].name AS [File Group Name],\n" +
"[SFG].*,\n" +
"[SDB].name AS [Database Name],\n" +
@@ -158,501 +157,524 @@ public Map getTableSpaces() {
" [filegroup_guid],\n" +
" [log_filegroup_id]\n" +
"DROP TABLE #fgroups\n";
- Map listTableSpace = new HashMap();
- try {
- Connection connect = getConnection();
- Statement stmt = connect.createStatement();
- ResultSet rs = stmt.executeQuery(query);
+
+
+ try (Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);){
+
while(rs.next()){
- String name = rs.getString("File Group Name");
- DBTableSpace dbTableSpace = new DBTableSpace(name);
- rowToProperties(rs, dbTableSpace.getOptions());
+ final String name = rs.getString("File Group Name");
+ final DBTableSpace dbTableSpace = new DBTableSpace(name, new StringProperties(rs));
listTableSpace.put(name, dbTableSpace);
}
- stmt.close();
- }catch(Exception e) {
- logger.error(e.getMessage());
- throw new ExceptionDBGitRunTime(e.getMessage());
- }
- return listTableSpace;
- }
- private DBSequence sequenceFromResultSet(ResultSet rs, String schema){
- try {
- String nameSeq = rs.getString("name");
- Long valueSeq = rs.getLong("current_value");
- DBSequence sequence = new DBSequence();
- sequence.setName(nameSeq);
- sequence.setSchema(schema);
- sequence.setValue(valueSeq);
- rowToProperties(rs, sequence.getOptions());
- return sequence;
- } catch (Exception ex){
- logger.error(ex.getMessage(), ex);
- throw new ExceptionDBGitRunTime(ex.getMessage(), ex);
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "tablespace").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
+
+ return listTableSpace;
}
@Override
public Map getSequences(String schema) {
- Map listSequence = new HashMap();
- try {
- Connection connect = getConnection();
- String query =
- "SELECT seq.*,\n" +
- "TYPE_NAME(seq.system_type_id) as typeName,\n" +
- "SCHEMA_NAME(seq.schema_id) as owner \n" +
- "FROM sys.objects, sys.SEQUENCES seq \n" +
- "WHERE sys.objects.object_id = seq.object_id \n" +
- "AND SCHEMA_NAME(seq.schema_id) = '"+schema+"'";
+ final Map listSequence = new HashMap();
+ final String query =
+ "SELECT seq.*,\n" +
+ "TYPE_NAME(seq.system_type_id) as typeName,\n" +
+ "SCHEMA_NAME(seq.schema_id) as owner \n" +
+ "FROM sys.objects, sys.SEQUENCES seq \n" +
+ "WHERE sys.objects.object_id = seq.object_id \n" +
+ "AND SCHEMA_NAME(seq.schema_id) = '"+schema+"'";
+
+ try(Statement stmtValue = getConnection().createStatement(); ResultSet rs = stmtValue.executeQuery(query)){
- Statement stmt = connect.createStatement();
- ResultSet rs = stmt.executeQuery(query);
while(rs.next()){
- String nameSeq = rs.getString("name");
- listSequence.put(nameSeq, sequenceFromResultSet(rs, schema));
+ final String ownerSeq = "dbo";
+ final String nameSeq = rs.getString("name");
+ final Long valueSeq = rs.getLong("current_value");
+ final DBSequence seq = new DBSequence(nameSeq, new StringProperties(rs), schema, ownerSeq, Collections.emptySet(), valueSeq);
+ listSequence.put(nameSeq, seq);
}
- stmt.close();
- }catch(Exception e) {
- logger.error(e.getMessage(), e);
- throw new ExceptionDBGitRunTime(e.getMessage(), e);
+
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "seq").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
+
return listSequence;
}
@Override
public DBSequence getSequence(String schema, String name) {
- try {
- Connection connect = getConnection();
- String query =
- "SELECT seq.*,\n" +
- "USER_NAME(objectproperty(seq.object_id,'OwnerId')) as owner,\n" +
- "TYPE_NAME(seq.system_type_id) as typeName, " +
- "SCHEMA_NAME(seq.schema_id) as schemaName " +
- "FROM sys.objects, sys.SEQUENCES seq " +
- "WHERE sys.objects.object_id = seq.object_id " +
- "AND SCHEMA_NAME(seq.schema_id) = '"+schema+"' " +
- "AND seq.name = '" + name + "'\n";
-
- Statement stmt = connect.createStatement();
- ResultSet rs = stmt.executeQuery(query);
+ final String query =
+ "SELECT seq.*,\n" +
+ "USER_NAME(objectproperty(seq.object_id,'OwnerId')) as owner,\n" +
+ "TYPE_NAME(seq.system_type_id) as typeName, " +
+ "SCHEMA_NAME(seq.schema_id) as schemaName " +
+ "FROM sys.objects, sys.SEQUENCES seq " +
+ "WHERE sys.objects.object_id = seq.object_id " +
+ "AND SCHEMA_NAME(seq.schema_id) = '"+schema+"' " +
+ "AND seq.name = '" + name + "'\n";
+
+ try(Statement stmtValue = getConnection().createStatement(); ResultSet rs = stmtValue.executeQuery(query)){
+
+ if(rs.next()){
+ final String ownerSeq = "dbo";
+ final String nameSeq = rs.getString("name");
+ final Long valueSeq = rs.getLong("current_value");
+ return new DBSequence(nameSeq, new StringProperties(rs), schema, ownerSeq, Collections.emptySet(), valueSeq);
+ } else {
+ final String msg = lang.getValue("errors", "adapter", "objectNotFoundInDb").toString();
+ throw new ExceptionDBGitObjectNotFound(msg);
+ }
- DBSequence sequence = null;
- while (rs.next()) {
- sequence = sequenceFromResultSet(rs, schema);
- }
- stmt.close();
- return sequence;
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "sequences").toString(), e);
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "sequences").toString(), e);
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "seq").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
}
@Override
public Map getTables(String schema) {
- Map listTable = new HashMap();
- try {
- String query =
- "SELECT TABLE_NAME as 'name', TABLE_CATALOG as 'database', TABLE_SCHEMA as 'schema'\n" +
- "FROM INFORMATION_SCHEMA.TABLES \n" +
- "WHERE INFORMATION_SCHEMA.TABLES.TABLE_SCHEMA = '" + schema + "'\n" +
- "AND INFORMATION_SCHEMA.TABLES.TABLE_TYPE = 'BASE TABLE'";
- Connection connect = getConnection();
-
- Statement stmt = connect.createStatement();
- ResultSet rs = stmt.executeQuery(query);
-
+ final Map listTable = new HashMap<>();
+ final String query =
+ "SELECT TABLE_NAME as 'name', TABLE_CATALOG as 'database', TABLE_SCHEMA as 'schema'\n" +
+ "FROM INFORMATION_SCHEMA.TABLES \n" +
+ "WHERE INFORMATION_SCHEMA.TABLES.TABLE_SCHEMA = '" + schema + "'\n" +
+ "AND INFORMATION_SCHEMA.TABLES.TABLE_TYPE = 'BASE TABLE'";
+
+ try (Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);) {
while(rs.next()){
- String nameTable = rs.getString("name");
- DBTable table = new DBTable(nameTable);
- table.setSchema(schema);
- rowToProperties(rs, table.getOptions());
+ //TODO retrieve table comment
+ //TODO retrieve table owner
+ final String nameTable = rs.getString("name");
+ final String ownerTable = "";
+ final String commentTable = "";
+ final StringProperties options = new StringProperties(rs);
+                //TODO retrieve real table dependencies: the query above selects no
+                //"dependencies" column, so rs.getArray("dependencies") always throws here
+                final Set dependencies = Collections.emptySet();
+
+ final DBTable table = new DBTable(nameTable, options, schema, ownerTable, dependencies, commentTable);
listTable.put(nameTable, table);
}
- stmt.close();
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "tables").toString(), e);
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "tables").toString(), e);
+
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "tables").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
return listTable;
}
@Override
public DBTable getTable(String schema, String name) {
- DBTable table = null;
- try(Statement stmt = getConnection().createStatement()) {
- String query =
- "SELECT\n" +
- " o.name tableName, t.TABLE_SCHEMA schemaName, t.TABLE_CATALOG catalogName,\n" +
- " CASE WHEN o.principal_id is NOT NULL THEN (SELECT name FROM sys.database_principals dp WHERE dp.principal_id=o.principal_id)\n" +
- " ELSE (SELECT dp.name FROM sys.database_principals dp,sys.schemas s WHERE s.schema_id=o.schema_id and s.principal_id=dp.principal_id)\n" +
- " END as owner\n" +
- "FROM sys.objects o, INFORMATION_SCHEMA.TABLES t\n" +
- "WHERE o.type='U' AND o.name = t.TABLE_NAME AND t.TABLE_NAME = '"+name+"' AND t.TABLE_SCHEMA = '"+schema+"'";
-
- ResultSet rs = stmt.executeQuery(query);
+ final String query =
+ "SELECT\n" +
+ " o.name tableName, t.TABLE_SCHEMA schemaName, t.TABLE_CATALOG catalogName,\n" +
+ " CASE WHEN o.principal_id is NOT NULL THEN (SELECT name FROM sys.database_principals dp WHERE dp.principal_id=o.principal_id)\n" +
+ " ELSE (SELECT dp.name FROM sys.database_principals dp,sys.schemas s WHERE s.schema_id=o.schema_id and s.principal_id=dp.principal_id)\n" +
+ " END as owner\n" +
+ "FROM sys.objects o, INFORMATION_SCHEMA.TABLES t\n" +
+ "WHERE o.type='U' AND o.name = t.TABLE_NAME AND t.TABLE_NAME = '"+name+"' AND t.TABLE_SCHEMA = '"+schema+"'";
+
+ try (Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);) {
+ if (rs.next()){
+ //TODO retrieve table comment
+ //TODO retrieve table owner
+            final String nameTable = rs.getString("tableName");
+ final String ownerTable = "";
+ final String commentTable = "";
+ final StringProperties options = new StringProperties(rs);
+            //TODO retrieve real table dependencies: the query above selects no
+            //"dependencies" column, so rs.getArray("dependencies") always throws here
+            final Set dependencies = Collections.emptySet();
+
+ return new DBTable(nameTable, options, schema, ownerTable, dependencies, commentTable);
+ } else {
+ final String msg = lang.getValue("errors", "adapter", "objectNotFoundInDb").toString();
+ throw new ExceptionDBGitObjectNotFound(msg);
+ }
- while(rs.next()){
- String nameTable = rs.getString("tableName");
- table = new DBTable(nameTable);
- table.setSchema(schema);
- rowToProperties(rs, table.getOptions());
- }
- return table;
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "tables").toString(), e);
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "tables").toString(), e);
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "tables").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
}
@Override
public Map getTableFields(String schema, String nameTable) {
- Map listField = new HashMap<>();
- try(Statement stmt = getConnection().createStatement()) {
- String query =
- "SELECT DISTINCT\n" +
- " c.TABLE_SCHEMA as schemaName,\n" +
- " c.TABLE_NAME as tableName,\n" +
- " c.COLUMN_NAME as columnName,\n" +
- " c.ORDINAL_POSITION as columnOrder,\n" +
- " c.DATA_TYPE as mssqlType,\n" +
- " CASE WHEN lower(c.DATA_TYPE) in ('bigint', 'int', 'float', 'decimal', 'money', 'numeric', 'real', 'smallint', 'smallmoney', 'tinyint') then 'number' \n" +
- " when lower(c.DATA_TYPE) in ('char','varchar','xml','nchar','nvarchar', 'uniqueidentifier') then 'string'\n" +
- " when lower(c.DATA_TYPE) in ('bit') then 'boolean'\n" +
- " when lower(c.DATA_TYPE) in ('datetime', 'smalldatetime', 'time') then 'date'\n" +
- " when lower(c.DATA_TYPE) in ('text','ntext') then 'text'\n" +
- " when lower(c.DATA_TYPE) in ('timestamp', 'binary', 'varbinary', 'geometry', 'geography') then 'binary'\n" +
- " else 'native'\n" +
- " end dbgitType,\n" +
- " CASE WHEN 1 IN ( \n" +
- " SELECT OBJECTPROPERTY(OBJECT_ID(CONSTRAINT_SCHEMA + '.' + QUOTENAME(CONSTRAINT_NAME)),'IsPrimaryKey')\n" +
- " FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE\n" +
- " WHERE c.COLUMN_NAME = COLUMN_NAME AND c.TABLE_NAME = TABLE_NAME\n" +
- " )\n" +
- " THEN 1 ELSE 0 END isPk,\n" +
- " c.IS_NULLABLE as isNullable,\n" +
- " c.NUMERIC_SCALE as scale,\n" +
- " c.CHARACTER_MAXIMUM_LENGTH as length,\n" +
- " CASE WHEN lower(c.DATA_TYPE) in ('char', 'nchar') then '1' else '0' end isFixed," +
- " c.NUMERIC_PRECISION as precision\n" +
- "FROM INFORMATION_SCHEMA.COLUMNS as c\n" +
- "WHERE TABLE_SCHEMA = '" + schema + "' AND TABLE_NAME = '" + nameTable + "'";
+ final Map listField = new HashMap<>();
+ final String query =
+ "SELECT DISTINCT\n" +
+ " c.TABLE_SCHEMA as schemaName,\n" +
+ " c.TABLE_NAME as tableName,\n" +
+ " c.COLUMN_NAME as columnName,\n" +
+ " c.ORDINAL_POSITION as columnOrder,\n" +
+ " c.DATA_TYPE as mssqlType,\n" +
+ " CASE WHEN lower(c.DATA_TYPE) in ('bigint', 'int', 'float', 'decimal', 'money', 'numeric', 'real', 'smallint', 'smallmoney', 'tinyint') then 'number' \n" +
+ " when lower(c.DATA_TYPE) in ('char','varchar','xml','nchar','nvarchar', 'uniqueidentifier') then 'string'\n" +
+ " when lower(c.DATA_TYPE) in ('bit') then 'boolean'\n" +
+ " when lower(c.DATA_TYPE) in ('datetime', 'smalldatetime', 'time') then 'date'\n" +
+ " when lower(c.DATA_TYPE) in ('text','ntext') then 'text'\n" +
+ " when lower(c.DATA_TYPE) in ('timestamp', 'binary', 'varbinary', 'geometry', 'geography') then 'binary'\n" +
+ " else 'native'\n" +
+ " end dbgitType,\n" +
+ " CASE WHEN 1 IN ( \n" +
+ " SELECT OBJECTPROPERTY(OBJECT_ID(CONSTRAINT_SCHEMA + '.' + QUOTENAME(CONSTRAINT_NAME)),'IsPrimaryKey')\n" +
+ " FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE\n" +
+ " WHERE c.COLUMN_NAME = COLUMN_NAME AND c.TABLE_NAME = TABLE_NAME\n" +
+ " )\n" +
+ " THEN 1 ELSE 0 END isPk,\n" +
+ " c.IS_NULLABLE as isNullable,\n" +
+ " c.NUMERIC_SCALE as scale,\n" +
+ " c.CHARACTER_MAXIMUM_LENGTH as length,\n" +
+ " CASE WHEN lower(c.DATA_TYPE) in ('char', 'nchar') then '1' else '0' end isFixed," +
+ " c.NUMERIC_PRECISION as precision\n" +
+ "FROM INFORMATION_SCHEMA.COLUMNS as c\n" +
+ "WHERE TABLE_SCHEMA = '" + schema + "' AND TABLE_NAME = '" + nameTable + "'";
+
+ try(Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);) {
- ResultSet rs = stmt.executeQuery(query);
while(rs.next()){
- DBTableField field = DBTableFieldFromRs(rs);
+ final DBTableField field = DBTableFieldFromRs(rs);
listField.put(field.getName(), field);
}
- return listField;
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "tables").toString(), e);
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "tables").toString(), e);
+
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "tableData").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
+
+ return listField;
}
private DBTableField DBTableFieldFromRs(ResultSet rs) throws SQLException {
- DBTableField field = new DBTableField();
- field.setName(rs.getString("columnName").toLowerCase());
- if (rs.getString("isPk").equals("1")) {
- field.setIsPrimaryKey(true);
- }
- field.setTypeSQL(getFieldType(rs));
- field.setTypeUniversal(FieldType.fromString(rs.getString("dbgitType").toUpperCase()));
- field.setLength(rs.getInt("length"));
- field.setScale(rs.getInt("scale"));
- field.setPrecision(rs.getInt("precision"));
- field.setFixed(rs.getBoolean("isFixed"));
- field.setOrder(rs.getInt("columnOrder"));
- return field;
+ final boolean isPrimaryKey = rs.getString("isPk").equals("1");
+ final boolean isFixed = rs.getBoolean("isFixed");
+        final boolean isNullable = rs.getString("isNullable").equals("YES"); // IS_NULLABLE is 'YES'/'NO'; getBoolean cannot convert it
+ final String columnName = rs.getString("columnName").toLowerCase();
+ //TODO make find out column comment
+ final String columnDesc = "";
+ //TODO make find out column default value
+ final String columnDefault = "";
+ final String typeSQL = getFieldType(rs);
+ final FieldType typeUniversal = FieldType.fromString(rs.getString("dbgitType").toUpperCase());
+ final int length = rs.getInt("length");
+ final int scale = rs.getInt("scale");
+ final int precision = rs.getInt("precision");
+        final int order = rs.getInt("columnOrder"); // query aliases ORDINAL_POSITION as columnOrder
+
+ return new DBTableField(
+ columnName,
+ columnDesc == null ? "" : columnDesc,
+ isPrimaryKey, isNullable,
+ typeSQL, typeUniversal, order,
+ columnDefault == null ? "" : columnDefault,
+ length, precision, scale, isFixed
+ );
+
}
- protected String getFieldType(ResultSet rs) {
- try {
- StringBuilder type = new StringBuilder();
- type.append(rs.getString("mssqlType"));
+ protected String getFieldType(ResultSet rs) throws SQLException {
- Integer max_length = rs.getInt("length");
- if (!rs.wasNull()) {
- type.append("("+max_length.toString()+")");
- }
- if (rs.getString("isNullable").equals("NO")){
- type.append(" NOT NULL");
- }
+ final StringBuilder type = new StringBuilder();
+ final Integer max_length = rs.getInt("length");
+ final String mssqlType = rs.getString("mssqlType");
+ final boolean isNotNull = rs.getString("isNullable").equals("NO");
- return type.toString();
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "tables").toString(), e);
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "tables").toString(), e);
+ type.append(mssqlType);
+ if (!rs.wasNull()) {
+ type.append("("+max_length.toString()+")");
+ }
+ if (isNotNull){
+ type.append(" NOT NULL");
}
+
+ return type.toString();
+
}
public Map getIndexesWithPks(String schema, String nameTable) {
- Map indexes = new HashMap<>();
- try (Statement stmt = getConnection().createStatement()){
- String query =
- " SELECT DB_NAME() AS databaseName,\n" +
- " sc.name as schemaName, \n" +
- " t.name AS tableName,\n" +
- " col.name as columnName,\n" +
- " si.name AS indexName,\n" +
- " si.is_primary_key isPk," +
- " si.index_id as indexId,\n" +
- " si.type_desc as typeName, \n" +
- " CASE si.index_id WHEN 0 THEN NULL\n" +
- " ELSE \n" +
- " CASE is_primary_key WHEN 1 THEN\n" +
- " N'ALTER TABLE ' + QUOTENAME(sc.name) + N'.' + QUOTENAME(t.name) + N' ADD CONSTRAINT ' + QUOTENAME(si.name) + N' PRIMARY KEY ' +\n" +
- " CASE WHEN si.index_id > 1 THEN N'NON' ELSE N'' END + N'CLUSTERED '\n" +
- " ELSE N'CREATE ' + \n" +
- " CASE WHEN si.is_unique = 1 then N'UNIQUE ' ELSE N'' END +\n" +
- " CASE WHEN si.index_id > 1 THEN N'NON' ELSE N'' END + N'CLUSTERED ' +\n" +
- " N'INDEX ' + QUOTENAME(si.name) + N' ON ' + QUOTENAME(sc.name) + N'.' + QUOTENAME(t.name) + N' '\n" +
- " END +\n" +
- " /* key def */ N'(' + key_definition + N')' +\n" +
- " /* includes */ CASE WHEN include_definition IS NOT NULL THEN \n" +
- " N' INCLUDE (' + include_definition + N')'\n" +
- " ELSE N''\n" +
- " END +\n" +
- " /* filters */ CASE WHEN filter_definition IS NOT NULL THEN \n" +
- " N' WHERE ' + filter_definition ELSE N''\n" +
- " END +\n" +
- " /* with clause - compression goes here */\n" +
- " CASE WHEN row_compression_partition_list IS NOT NULL OR page_compression_partition_list IS NOT NULL \n" +
- " THEN N' WITH (' +\n" +
- " CASE WHEN row_compression_partition_list IS NOT NULL THEN\n" +
- " N'DATA_COMPRESSION = ROW ' + CASE WHEN psc.name IS NULL THEN N'' ELSE + N' ON PARTITIONS (' + row_compression_partition_list + N')' END\n" +
- " ELSE N'' END +\n" +
- " CASE WHEN row_compression_partition_list IS NOT NULL AND page_compression_partition_list IS NOT NULL THEN N', ' ELSE N'' END +\n" +
- " CASE WHEN page_compression_partition_list IS NOT NULL THEN\n" +
- " N'DATA_COMPRESSION = PAGE ' + CASE WHEN psc.name IS NULL THEN N'' ELSE + N' ON PARTITIONS (' + page_compression_partition_list + N')' END\n" +
- " ELSE N'' END\n" +
- " + N')'\n" +
- " ELSE N''\n" +
- " END +\n" +
- " ' ON ' + CASE WHEN psc.name is null \n" +
- " THEN ISNULL(QUOTENAME(fg.name),N'')\n" +
- " ELSE psc.name + N' (' + partitioning_column.column_name + N')' \n" +
- " END\n" +
- " + N';'\n" +
- " END AS ddl,\n" +
- " si.has_filter,\n" +
- " si.is_unique,\n" +
- " ISNULL(pf.name, NULL) AS partition_function,\n" +
- " ISNULL(psc.name, fg.name) AS partition_scheme_or_filegroup\n" +
- "FROM sys.indexes AS si \n" +
- "JOIN sys.index_columns ic ON si.object_id = ic.object_id and si.index_id = ic.index_id \n" +
- "JOIN sys.columns col ON ic.object_id = col.object_id and ic.column_id = col.column_id \n" +
- "JOIN sys.tables AS t ON si.object_id=t.object_id\n" +
- "JOIN sys.schemas AS sc ON t.schema_id=sc.schema_id\n" +
- "LEFT JOIN sys.dm_db_index_usage_stats AS stat ON \n" +
- " stat.database_id = DB_ID() \n" +
- " and si.object_id=stat.object_id \n" +
- " and si.index_id=stat.index_id\n" +
- "LEFT JOIN sys.partition_schemes AS psc ON si.data_space_id=psc.data_space_id\n" +
- "LEFT JOIN sys.partition_functions AS pf ON psc.function_id=pf.function_id\n" +
- "LEFT JOIN sys.filegroups AS fg ON si.data_space_id=fg.data_space_id\n" +
- "OUTER APPLY ( SELECT STUFF (\n" +
- " (SELECT N', ' + QUOTENAME(c.name) +\n" +
- " CASE ic.is_descending_key WHEN 1 then N' DESC' ELSE N'' END\n" +
- " FROM sys.index_columns AS ic \n" +
- " JOIN sys.columns AS c ON \n" +
- " ic.column_id=c.column_id \n" +
- " and ic.object_id=c.object_id\n" +
- " WHERE ic.object_id = si.object_id\n" +
- " and ic.index_id=si.index_id\n" +
- " and ic.key_ordinal > 0\n" +
- " ORDER BY ic.key_ordinal FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'),1,2,'')) AS keys ( key_definition )\n" +
- "OUTER APPLY (\n" +
- " SELECT MAX(QUOTENAME(c.name)) AS column_name\n" +
- " FROM sys.index_columns AS ic \n" +
- " JOIN sys.columns AS c ON \n" +
- " ic.column_id=c.column_id \n" +
- " and ic.object_id=c.object_id\n" +
- " WHERE ic.object_id = si.object_id\n" +
- " and ic.index_id=si.index_id\n" +
- " and ic.partition_ordinal = 1) AS partitioning_column\n" +
- "OUTER APPLY ( SELECT STUFF (\n" +
- " (SELECT N', ' + QUOTENAME(c.name)\n" +
- " FROM sys.index_columns AS ic \n" +
- " JOIN sys.columns AS c ON \n" +
- " ic.column_id=c.column_id \n" +
- " and ic.object_id=c.object_id\n" +
- " WHERE ic.object_id = si.object_id\n" +
- " and ic.index_id=si.index_id\n" +
- " and ic.is_included_column = 1\n" +
- " ORDER BY c.name FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'),1,2,'')) AS includes ( include_definition )\n" +
- "OUTER APPLY ( SELECT STUFF (\n" +
- " (SELECT N', ' + CAST(p.partition_number AS VARCHAR(32))\n" +
- " FROM sys.partitions AS p\n" +
- " WHERE p.object_id = si.object_id\n" +
- " and p.index_id=si.index_id\n" +
- " and p.data_compression = 1\n" +
- " ORDER BY p.partition_number FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'),1,2,'')) AS row_compression_clause ( row_compression_partition_list )\n" +
- "OUTER APPLY ( SELECT STUFF (\n" +
- " (SELECT N', ' + CAST(p.partition_number AS VARCHAR(32))\n" +
- " FROM sys.partitions AS p\n" +
- " WHERE p.object_id = si.object_id\n" +
- " and p.index_id=si.index_id\n" +
- " and p.data_compression = 2\n" +
- " ORDER BY p.partition_number FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'),1,2,'')) AS page_compression_clause ( page_compression_partition_list )\n" +
- "WHERE si.type IN (1,2) /* clustered, nonclustered */\n" +
+ final Map indexes = new HashMap<>();
+ final String query =
+ " SELECT DB_NAME() AS databaseName,\n" +
+ " sc.name as schemaName, \n" +
+ " t.name AS tableName,\n" +
+ " col.name as columnName,\n" +
+ " si.name AS indexName,\n" +
+ " si.is_primary_key isPk," +
+ " si.index_id as indexId,\n" +
+ " si.type_desc as typeName, \n" +
+ " CASE si.index_id WHEN 0 THEN NULL\n" +
+ " ELSE \n" +
+ " CASE is_primary_key WHEN 1 THEN\n" +
+ " N'ALTER TABLE ' + QUOTENAME(sc.name) + N'.' + QUOTENAME(t.name) + N' ADD CONSTRAINT ' + QUOTENAME(si.name) + N' PRIMARY KEY ' +\n" +
+ " CASE WHEN si.index_id > 1 THEN N'NON' ELSE N'' END + N'CLUSTERED '\n" +
+ " ELSE N'CREATE ' + \n" +
+ " CASE WHEN si.is_unique = 1 then N'UNIQUE ' ELSE N'' END +\n" +
+ " CASE WHEN si.index_id > 1 THEN N'NON' ELSE N'' END + N'CLUSTERED ' +\n" +
+ " N'INDEX ' + QUOTENAME(si.name) + N' ON ' + QUOTENAME(sc.name) + N'.' + QUOTENAME(t.name) + N' '\n" +
+ " END +\n" +
+ " /* key def */ N'(' + key_definition + N')' +\n" +
+ " /* includes */ CASE WHEN include_definition IS NOT NULL THEN \n" +
+ " N' INCLUDE (' + include_definition + N')'\n" +
+ " ELSE N''\n" +
+ " END +\n" +
+ " /* filters */ CASE WHEN filter_definition IS NOT NULL THEN \n" +
+ " N' WHERE ' + filter_definition ELSE N''\n" +
+ " END +\n" +
+ " /* with clause - compression goes here */\n" +
+ " CASE WHEN row_compression_partition_list IS NOT NULL OR page_compression_partition_list IS NOT NULL \n" +
+ " THEN N' WITH (' +\n" +
+ " CASE WHEN row_compression_partition_list IS NOT NULL THEN\n" +
+ " N'DATA_COMPRESSION = ROW ' + CASE WHEN psc.name IS NULL THEN N'' ELSE + N' ON PARTITIONS (' + row_compression_partition_list + N')' END\n" +
+ " ELSE N'' END +\n" +
+ " CASE WHEN row_compression_partition_list IS NOT NULL AND page_compression_partition_list IS NOT NULL THEN N', ' ELSE N'' END +\n" +
+ " CASE WHEN page_compression_partition_list IS NOT NULL THEN\n" +
+ " N'DATA_COMPRESSION = PAGE ' + CASE WHEN psc.name IS NULL THEN N'' ELSE + N' ON PARTITIONS (' + page_compression_partition_list + N')' END\n" +
+ " ELSE N'' END\n" +
+ " + N')'\n" +
+ " ELSE N''\n" +
+ " END +\n" +
+ " ' ON ' + CASE WHEN psc.name is null \n" +
+ " THEN ISNULL(QUOTENAME(fg.name),N'')\n" +
+ " ELSE psc.name + N' (' + partitioning_column.column_name + N')' \n" +
+ " END\n" +
+ " + N';'\n" +
+ " END AS ddl,\n" +
+ " si.has_filter,\n" +
+ " si.is_unique,\n" +
+ " ISNULL(pf.name, NULL) AS partition_function,\n" +
+ " ISNULL(psc.name, fg.name) AS partition_scheme_or_filegroup\n" +
+ "FROM sys.indexes AS si \n" +
+ "JOIN sys.index_columns ic ON si.object_id = ic.object_id and si.index_id = ic.index_id \n" +
+ "JOIN sys.columns col ON ic.object_id = col.object_id and ic.column_id = col.column_id \n" +
+ "JOIN sys.tables AS t ON si.object_id=t.object_id\n" +
+ "JOIN sys.schemas AS sc ON t.schema_id=sc.schema_id\n" +
+ "LEFT JOIN sys.dm_db_index_usage_stats AS stat ON \n" +
+ " stat.database_id = DB_ID() \n" +
+ " and si.object_id=stat.object_id \n" +
+ " and si.index_id=stat.index_id\n" +
+ "LEFT JOIN sys.partition_schemes AS psc ON si.data_space_id=psc.data_space_id\n" +
+ "LEFT JOIN sys.partition_functions AS pf ON psc.function_id=pf.function_id\n" +
+ "LEFT JOIN sys.filegroups AS fg ON si.data_space_id=fg.data_space_id\n" +
+ "OUTER APPLY ( SELECT STUFF (\n" +
+ " (SELECT N', ' + QUOTENAME(c.name) +\n" +
+ " CASE ic.is_descending_key WHEN 1 then N' DESC' ELSE N'' END\n" +
+ " FROM sys.index_columns AS ic \n" +
+ " JOIN sys.columns AS c ON \n" +
+ " ic.column_id=c.column_id \n" +
+ " and ic.object_id=c.object_id\n" +
+ " WHERE ic.object_id = si.object_id\n" +
+ " and ic.index_id=si.index_id\n" +
+ " and ic.key_ordinal > 0\n" +
+ " ORDER BY ic.key_ordinal FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'),1,2,'')) AS keys ( key_definition )\n" +
+ "OUTER APPLY (\n" +
+ " SELECT MAX(QUOTENAME(c.name)) AS column_name\n" +
+ " FROM sys.index_columns AS ic \n" +
+ " JOIN sys.columns AS c ON \n" +
+ " ic.column_id=c.column_id \n" +
+ " and ic.object_id=c.object_id\n" +
+ " WHERE ic.object_id = si.object_id\n" +
+ " and ic.index_id=si.index_id\n" +
+ " and ic.partition_ordinal = 1) AS partitioning_column\n" +
+ "OUTER APPLY ( SELECT STUFF (\n" +
+ " (SELECT N', ' + QUOTENAME(c.name)\n" +
+ " FROM sys.index_columns AS ic \n" +
+ " JOIN sys.columns AS c ON \n" +
+ " ic.column_id=c.column_id \n" +
+ " and ic.object_id=c.object_id\n" +
+ " WHERE ic.object_id = si.object_id\n" +
+ " and ic.index_id=si.index_id\n" +
+ " and ic.is_included_column = 1\n" +
+ " ORDER BY c.name FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'),1,2,'')) AS includes ( include_definition )\n" +
+ "OUTER APPLY ( SELECT STUFF (\n" +
+ " (SELECT N', ' + CAST(p.partition_number AS VARCHAR(32))\n" +
+ " FROM sys.partitions AS p\n" +
+ " WHERE p.object_id = si.object_id\n" +
+ " and p.index_id=si.index_id\n" +
+ " and p.data_compression = 1\n" +
+ " ORDER BY p.partition_number FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'),1,2,'')) AS row_compression_clause ( row_compression_partition_list )\n" +
+ "OUTER APPLY ( SELECT STUFF (\n" +
+ " (SELECT N', ' + CAST(p.partition_number AS VARCHAR(32))\n" +
+ " FROM sys.partitions AS p\n" +
+ " WHERE p.object_id = si.object_id\n" +
+ " and p.index_id=si.index_id\n" +
+ " and p.data_compression = 2\n" +
+ " ORDER BY p.partition_number FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'),1,2,'')) AS page_compression_clause ( page_compression_partition_list )\n" +
+ "WHERE si.type IN (1,2) /* clustered, nonclustered */\n" +
// "AND si.is_primary_key = 0 /* no PKs */\n" +
- "AND si.is_hypothetical = 0 /* bugged feature, always better to delete, no need to store and reconstruct them */\n" +
- "AND upper(t.name) = upper('" + nameTable + "') AND upper(sc.name) = upper('" + schema + "')" +
- "OPTION (RECOMPILE);";
+ "AND si.is_hypothetical = 0 /* bugged feature, always better to delete, no need to store and reconstruct them */\n" +
+ "AND upper(t.name) = upper('" + nameTable + "') AND upper(sc.name) = upper('" + schema + "')" +
+ "OPTION (RECOMPILE);";
+
+ try (Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);){
- ResultSet rs = stmt.executeQuery(query);
while(rs.next()){
- DBIndex index = new DBIndex();
- index.setName(rs.getString("indexName"));
- index.setSchema(schema);
- rowToProperties(rs, index.getOptions());
+ final String name = rs.getString("indexName");
+                final String owner = ""; //TODO: index query selects no "owner" column; reading it would throw SQLException
+ final String sql = rs.getString("ddl");
+ final DBIndex index = new DBIndex(name, new StringProperties(rs), schema, owner, Collections.emptySet(), sql);
+
indexes.put(index.getName(), index);
}
return indexes;
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "indexes").toString());
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "indexes").toString(), e);
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "indexes").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
}
@Override
public Map getIndexes(String schema, String nameTable){
- Map indexes = getIndexesWithPks(schema, nameTable);
+ final Map indexes = getIndexesWithPks(schema, nameTable);
+
indexes.values().removeIf(x->x.getOptions().getChildren().get("ispk").getData().equals("1"));
return indexes;
}
@Override
public Map getConstraints(String schema, String nameTable) {
- Map constraints = new HashMap<>();
- ArrayList queries = new ArrayList<>();
+ final Map constraints = new HashMap<>();
+ final ArrayList queries = new ArrayList<>();
//TODO [] in object names
//check
- queries.add("SELECT sc.name as schemaName, t.name as tableName, col.name as columnName, c.name as constraintName, c.name as indexName, c.type_desc as constraintType, \n" +
- "'ALTER TABLE ' + sc.name + '.' + t.name + ' ADD CONSTRAINT ' + c.name + ' CHECK ' + c.definition + ';' as ddl\n" +
- "FROM sys.check_constraints c\n" +
- "JOIN sys.tables t ON c.parent_object_id = t.object_id \n" +
- "LEFT OUTER JOIN sys.columns col on col.column_id = c.parent_column_id AND col.object_id = c.parent_object_id\n" +
- "JOIN sys.schemas AS sc ON t.schema_id=sc.schema_id \n" +
- "WHERE t.name = ? AND sc.name = ?");
+ queries.add(
+ "SELECT sc.name as schemaName, t.name as tableName, col.name as columnName, c.name as constraintName, c.name as indexName, c.type_desc as constraintType, \n" +
+ "'ALTER TABLE ' + sc.name + '.' + t.name + ' ADD CONSTRAINT ' + c.name + ' CHECK ' + c.definition + ';' as ddl\n" +
+ "FROM sys.check_constraints c\n" +
+ "JOIN sys.tables t ON c.parent_object_id = t.object_id \n" +
+ "LEFT OUTER JOIN sys.columns col on col.column_id = c.parent_column_id AND col.object_id = c.parent_object_id\n" +
+ "JOIN sys.schemas AS sc ON t.schema_id=sc.schema_id \n" +
+            "WHERE t.name = ? AND sc.name = ?");
//default
- queries.add("SELECT sc.name AS schemaName, t.name AS tableName, col.name AS columnName, c.name AS constraintName, c.type_desc AS constraintType, \n" +
- "'ALTER TABLE ' + sc.name + '.' + t.name + ' ADD CONSTRAINT ' + c.name+ ' DEFAULT ' \n" +
- " + CASE WHEN ISNUMERIC(ic.COLUMN_DEFAULT) = 1 \n" +
- " THEN TRY_CONVERT(nvarchar, TRY_CONVERT(numeric, ic.COLUMN_DEFAULT))\n" +
- " ELSE '' + ic.COLUMN_DEFAULT + '' END\n" +
- " + ' FOR [' + col.name + '];' AS ddl\n" +
- "FROM sys.default_constraints c\n" +
- "JOIN sys.tables t ON c.parent_object_id = t.object_id \n" +
- "JOIN sys.columns col ON col.default_object_id = c.object_id\n" +
- "JOIN sys.schemas AS sc ON t.schema_id=sc.schema_id \n" +
- "JOIN INFORMATION_SCHEMA.COLUMNS ic on t.name = ic.TABLE_NAME AND col.name = ic.COLUMN_NAME \n" +
- "WHERE t.name = ? AND sc.name = ?\n");
+ queries.add(
+ "SELECT sc.name AS schemaName, t.name AS tableName, col.name AS columnName, c.name AS constraintName, c.type_desc AS constraintType, \n" +
+ "'ALTER TABLE ' + sc.name + '.' + t.name + ' ADD CONSTRAINT ' + c.name+ ' DEFAULT ' \n" +
+ " + CASE WHEN ISNUMERIC(ic.COLUMN_DEFAULT) = 1 \n" +
+ " THEN TRY_CONVERT(nvarchar, TRY_CONVERT(numeric, ic.COLUMN_DEFAULT))\n" +
+ " ELSE '' + ic.COLUMN_DEFAULT + '' END\n" +
+ " + ' FOR [' + col.name + '];' AS ddl\n" +
+ "FROM sys.default_constraints c\n" +
+ "JOIN sys.tables t ON c.parent_object_id = t.object_id \n" +
+ "JOIN sys.columns col ON col.default_object_id = c.object_id\n" +
+ "JOIN sys.schemas AS sc ON t.schema_id=sc.schema_id \n" +
+ "JOIN INFORMATION_SCHEMA.COLUMNS ic on t.name = ic.TABLE_NAME AND col.name = ic.COLUMN_NAME \n" +
+            "WHERE t.name = ? AND sc.name = ?\n"
+ );
//unique
- queries.add("SELECT TC.TABLE_SCHEMA AS schemaName, TC.TABLE_NAME AS tableName, CC.Column_Name AS columnName, TC.Constraint_Name AS constraintName, TC.CONSTRAINT_TYPE AS constraintType,\n" +
- "'ALTER TABLE ' + TC.TABLE_SCHEMA + '.' + TC.TABLE_NAME + ' ADD CONSTRAINT ' + TC.CONSTRAINT_NAME + ' UNIQUE NONCLUSTERED ([' + CC.COLUMN_NAME + ']);' AS ddl\n" +
- "FROM INFORMATION_SCHEMA.table_constraints TC\n" +
- "INNER JOIN INFORMATION_SCHEMA.constraint_column_usage CC on TC.Constraint_Name = CC.Constraint_Name\n" +
- "WHERE TC.constraint_type = 'Unique' AND TC.TABLE_NAME = ? AND TC.TABLE_SCHEMA = ? ---- PARAMETER 1,2\n");
+ queries.add(
+ "SELECT TC.TABLE_SCHEMA AS schemaName, TC.TABLE_NAME AS tableName, CC.Column_Name AS columnName, TC.Constraint_Name AS constraintName, TC.CONSTRAINT_TYPE AS constraintType,\n" +
+ "'ALTER TABLE ' + TC.TABLE_SCHEMA + '.' + TC.TABLE_NAME + ' ADD CONSTRAINT ' + TC.CONSTRAINT_NAME + ' UNIQUE NONCLUSTERED ([' + CC.COLUMN_NAME + ']);' AS ddl\n" +
+ "FROM INFORMATION_SCHEMA.table_constraints TC\n" +
+ "INNER JOIN INFORMATION_SCHEMA.constraint_column_usage CC on TC.Constraint_Name = CC.Constraint_Name\n" +
+            "WHERE TC.constraint_type = 'Unique' AND TC.TABLE_NAME = ? AND TC.TABLE_SCHEMA = ? ---- PARAMETER 1,2\n"
+ );
//foreign
- queries.add("SELECT ss.name as schemaName, t.name as tableName, sc.name as columnName, o.name as constraintName, o.type_desc as constraintType, refss.name as refSchemaName, refst.name as refTableName, refsc.name as refColumnName, " +
- "'ALTER TABLE ' + ss.name + '.' + t.name + ' ADD CONSTRAINT ' + o.name + ' FOREIGN KEY ('+ sc.name + ') references ' + refss.name + '.' + refst.name + '(' + refsc.name + ');' as ddl\n" +
- "FROM sys.foreign_key_columns c\n" +
- "JOIN sys.objects o ON c.constraint_object_id = o.object_id\n" +
- "LEFT OUTER JOIN sys.tables t on t.object_id = c.parent_object_id \n" +
- "LEFT OUTER JOIN sys.schemas ss on ss.schema_id = o.schema_id \n" +
- "LEFT OUTER JOIN sys.columns sc on sc.object_id = c.parent_object_id AND sc.column_id = c.parent_column_id\n" +
- "LEFT OUTER JOIN sys.tables refst on refst.object_id = c.referenced_object_id\n" +
- "LEFT OUTER JOIN sys.schemas refss on refss.schema_id = refst.schema_id\n" +
- "LEFT OUTER JOIN sys.columns refsc on refsc.object_id = c.referenced_object_id AND refsc.column_id = c.referenced_column_id \n" +
- "WHERE t.name = ? AND ss.name = ?\n"
+ queries.add(
+ "SELECT ss.name as schemaName, t.name as tableName, sc.name as columnName, o.name as constraintName, o.type_desc as constraintType, refss.name as refSchemaName, refst.name as refTableName, refsc.name as refColumnName, " +
+ "'ALTER TABLE ' + ss.name + '.' + t.name + ' ADD CONSTRAINT ' + o.name + ' FOREIGN KEY ('+ sc.name + ') references ' + refss.name + '.' + refst.name + '(' + refsc.name + ');' as ddl\n" +
+ "FROM sys.foreign_key_columns c\n" +
+ "JOIN sys.objects o ON c.constraint_object_id = o.object_id\n" +
+ "LEFT OUTER JOIN sys.tables t on t.object_id = c.parent_object_id \n" +
+ "LEFT OUTER JOIN sys.schemas ss on ss.schema_id = o.schema_id \n" +
+ "LEFT OUTER JOIN sys.columns sc on sc.object_id = c.parent_object_id AND sc.column_id = c.parent_column_id\n" +
+ "LEFT OUTER JOIN sys.tables refst on refst.object_id = c.referenced_object_id\n" +
+ "LEFT OUTER JOIN sys.schemas refss on refss.schema_id = refst.schema_id\n" +
+ "LEFT OUTER JOIN sys.columns refsc on refsc.object_id = c.referenced_object_id AND refsc.column_id = c.referenced_column_id \n" +
+ "WHERE t.name = :name AND ss.name = :schema\n"
);
+ final Iterator<String> it = queries.iterator();
+ while (it.hasNext()) {
+ final String query = it.next();
+ try (
+ PreparedStatement stmt = preparedStatement(getConnection(), query, ImmutableMap.of("name", nameTable, "schema" , schema));
+ ResultSet rs = stmt.executeQuery();
+ ){
- Iterator it = queries.iterator();
- try {
- while (it.hasNext()) {
- String query = it.next();
- PreparedStatement stmt = connect.prepareStatement(query);
- stmt.setString(2, schema);
- stmt.setString(1, nameTable);
- ResultSet rs = stmt.executeQuery();
-
-
- while (rs.next()) {
- DBConstraint con = new DBConstraint();
- con.setName(rs.getString("constraintName"));
- con.setConstraintType(rs.getString("constraintType"));
- con.setSchema(schema);
- rowToProperties(rs, con.getOptions());
- constraints.put(con.getName(), con);
- }
- stmt.close();
- }
+ while (rs.next()) {
+ final String name = rs.getString("constraintName");
+ final String type = rs.getString("constraintType");
+ final String sql = rs.getString("ddl");
+ final String owner = schema;
- //primary keys
- Map indexes = getIndexesWithPks(schema, nameTable);
- indexes.values().removeIf(x->x.getOptions().getChildren().get("ispk").getData().equals("0"));
- for(DBIndex pki:indexes.values()){
- DBConstraint pkc = new DBConstraint();
- pkc.setName(pki.getName());
- pkc.setConstraintType(pki.getOptions().getChildren().get("typename").getData());
- pkc.setSchema(pki.getSchema());
- pkc.setOptions(pki.getOptions());
- constraints.put(pkc.getName(), pkc);
+ final DBConstraint con = new DBConstraint(name, new StringProperties(rs), schema, owner, Collections.emptySet(), sql, type);
+ constraints.put(con.getName(), con);
+ }
+ } catch (Exception ex){
+ final String msg = lang.getValue("errors", "adapter", "constraints").toString();
+ throw new ExceptionDBGitRunTime(msg, ex);
}
+ }
+
+ //primary keys
+ final Map<String, DBIndex> indexes = getIndexesWithPks(schema, nameTable);
+ indexes.values().removeIf(x->x.getOptions().getChildren().get("ispk").getData().equals("0"));
+
+ for( DBIndex pki : indexes.values() ){
+ final String constraintType = pki.getOptions().getChildren().get("typename").getData();
+ final DBConstraint pkc = new DBConstraint(
+ pki.getName(),
+ pki.getOptions(),
+ pki.getSchema(),
+ pki.getOwner(),
+ new HashSet<>(pki.getDependencies()),
+ pki.getSql(),
+ constraintType
+ );
+ pkc.setOptions(pki.getOptions());
+ constraints.put(pkc.getName(), pkc);
+ }
- return constraints;
+ return constraints;
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "constraints").toString());
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "constraints").toString(), e);
- }
}
@Override
public Map getViews(String schema) {
- Map listView = new HashMap();
- try (Statement stmt = getConnection().createStatement()){
- String query =
- "SELECT \n" +
- " sp.name as ownerName, sp.type_desc as ownerType, ss.name AS schemaName, sv.name AS viewName, sm.definition as ddl, \n" +
- " sv.type_desc as typeName, sm.uses_ansi_nulls, sm.uses_quoted_identifier, sm.is_schema_bound, \n" +
- " OBJECTPROPERTYEX(sv.object_id,'IsIndexable') AS IsIndexable,\n" +
- " OBJECTPROPERTYEX(sv.object_id,'IsIndexed') AS IsIndexed\n" +
- "FROM sys.views sv\n" +
- "JOIN sys.schemas ss ON sv.schema_id = ss.schema_id\n" +
- "LEFT OUTER JOIN sys.sql_modules sm on sv.object_id = sm.object_id\n" +
- "LEFT OUTER JOIN sys.database_principals sp on sv.principal_id = sp.principal_id";
-
- ResultSet rs = stmt.executeQuery(query);
+ final Map<String, DBView> listView = new HashMap<>();
+ final String query =
+ "SELECT \n" +
+ " sp.name as ownerName, sp.type_desc as ownerType, ss.name AS schemaName, sv.name AS viewName, sm.definition as ddl, \n" +
+ " sv.type_desc as typeName, sm.uses_ansi_nulls, sm.uses_quoted_identifier, sm.is_schema_bound, \n" +
+ " OBJECTPROPERTYEX(sv.object_id,'IsIndexable') AS IsIndexable,\n" +
+ " OBJECTPROPERTYEX(sv.object_id,'IsIndexed') AS IsIndexed\n" +
+ "FROM sys.views sv\n" +
+ "JOIN sys.schemas ss ON sv.schema_id = ss.schema_id\n" +
+ "LEFT OUTER JOIN sys.sql_modules sm on sv.object_id = sm.object_id\n" +
+ "LEFT OUTER JOIN sys.database_principals sp on sv.principal_id = sp.principal_id";
+
+ try (Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);){
+
while(rs.next()){
- DBView view = new DBView(rs.getString("viewName"));
- view.setSchema(rs.getString("schemaName"));
- view.setOwner(rs.getString("ownerName"));
- rowToProperties(rs, view.getOptions());
- listView.put(rs.getString("viewName"), view);
+ final String name = rs.getString("viewName");
+ final String schemaName = rs.getString("schemaName");
+ final String owner = rs.getString("ownerName");
+ final String sql = rs.getString("ddl");
+
+ final DBView view = new DBView(name, new StringProperties(rs), schemaName, owner, Collections.emptySet(), sql);
+ listView.put(name, view);
}
return listView;
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "views") + ": "+ e.getMessage());
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "views") + ": " + e.getMessage());
+
+ } catch(Exception e) {
+ final DBGitLang msg = lang.getValue("errors", "adapter", "views");
+ throw new ExceptionDBGitRunTime(msg, e);
}
}
@Override
public DBView getView(String schema, String name) {
+ //TODO single-version query with ExceptionDBGitNotFound
try {
return getViews(schema).get(name);
-
- }catch(Exception e) {
- logger.error(lang.getValue("errors", "adapter", "views").toString() + ": "+ e.getMessage());
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "views").toString() + ": "+ e.getMessage());
+ } catch(Exception e) {
+ final DBGitLang msg = lang.getValue("errors", "adapter", "views");
+ throw new ExceptionDBGitRunTime(msg, e);
}
}
@@ -665,127 +687,124 @@ public Map getPackages(String schema) {
@Override
public DBPackage getPackage(String schema, String name) {
// No such implementation in MSSQL
- return null;
+ throw new ExceptionDBGitRunTime(new ExceptionDBGitObjectNotFound("cannot get packages on mssql"));
}
@Override
public Map getProcedures(String schema) {
- Map listProcedure = new HashMap();
- try (Statement stmt = getConnection().createStatement()){
- String query =
- "SELECT s.name schemaName, o.name procedureName, o.type_desc as typeName, definition ddl, USER_NAME(so.uid) AS owner \n" +
- "FROM sys.sql_modules m\n" +
- "JOIN sys.procedures p ON m.object_id = p.object_id\n" +
- "JOIN sys.objects o \n" +
- " ON o.object_id = p.object_id \n" +
- " AND Left(o.name, 3) NOT IN ('sp_', 'xp_', 'ms_') \n" +
- "JOIN sys.schemas s ON s.schema_id = o.schema_id\n" +
- "JOIN sysobjects so on o.object_id = so.id\n" +
- "WHERE s.name = '" + schema + "'\n";
-
- ResultSet rs = stmt.executeQuery(query);
+ final Map<String, DBProcedure> listProcedure = new HashMap<>();
+ final String query =
+ "SELECT s.name schemaName, o.name procedureName, o.type_desc as typeName, definition ddl, USER_NAME(so.uid) AS owner \n" +
+ "FROM sys.sql_modules m\n" +
+ "JOIN sys.procedures p ON m.object_id = p.object_id\n" +
+ "JOIN sys.objects o \n" +
+ " ON o.object_id = p.object_id \n" +
+ " AND Left(o.name, 3) NOT IN ('sp_', 'xp_', 'ms_') \n" +
+ "JOIN sys.schemas s ON s.schema_id = o.schema_id\n" +
+ "JOIN sysobjects so on o.object_id = so.id\n" +
+ "WHERE s.name = '" + schema + "'\n";
+
+ try (Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);){
while(rs.next()){
- String name = rs.getString("procedureName");
- String owner = rs.getString("owner");
- DBProcedure proc = new DBProcedure(name);
- proc.setSchema(schema);
- proc.setOwner(owner);
- proc.setName(name);
- rowToProperties(rs,proc.getOptions());
+ final String name = rs.getString("procedureName");
+ final String owner = rs.getString("owner");
+ final String sql = rs.getString("ddl");
+ final StringProperties options = new StringProperties(rs);
+ final DBProcedure proc = new DBProcedure(name, options, schema, owner, Collections.emptySet(), sql);
listProcedure.put(name, proc);
}
- stmt.close();
}catch(Exception e) {
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "prc").toString(), e);
+ final String msg = lang.getValue("errors", "adapter", "prc").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
return listProcedure;
}
@Override
public DBProcedure getProcedure(String schema, String name) {
- try (Statement stmt = getConnection().createStatement()){
- String query =
- "SELECT s.name schemaName, o.name procedureName, o.type_desc as typeName, definition ddl, USER_NAME(so.uid) AS owner \n" +
- "FROM sys.sql_modules m\n" +
- "JOIN sys.procedures p ON m.object_id = p.object_id\n" +
- "JOIN sys.objects o \n" +
- " ON o.object_id = p.object_id \n" +
- " AND Left(o.name, 3) NOT IN ('sp_', 'xp_', 'ms_') -- filter out system ones\n" +
- "JOIN sys.schemas s ON s.schema_id = o.schema_id\n" +
- "JOIN sysobjects so on o.object_id = so.id \n" +
- "WHERE s.name = '" + schema + "' AND o.name = '" + name + "'";
- ResultSet rs = stmt.executeQuery(query);
- DBProcedure proc = null;
-
- while (rs.next()) {
- proc = new DBProcedure(rs.getString("procedureName"));
- String owner = rs.getString("owner");
- proc.setSchema(schema);
- proc.setOwner(owner);
- rowToProperties(rs,proc.getOptions());
- }
- stmt.close();
-
- return proc;
-
- }catch(Exception e) {
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "prc").toString(), e);
+ final String query =
+ "SELECT s.name schemaName, o.name procedureName, o.type_desc as typeName, definition ddl, USER_NAME(so.uid) AS owner \n" +
+ "FROM sys.sql_modules m\n" +
+ "JOIN sys.procedures p ON m.object_id = p.object_id\n" +
+ "JOIN sys.objects o \n" +
+ " ON o.object_id = p.object_id \n" +
+ " AND Left(o.name, 3) NOT IN ('sp_', 'xp_', 'ms_') -- filter out system ones\n" +
+ "JOIN sys.schemas s ON s.schema_id = o.schema_id\n" +
+ "JOIN sysobjects so on o.object_id = so.id \n" +
+ "WHERE s.name = '" + schema + "' AND o.name = '" + name + "'";
+
+ try (Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);){
+
+ if (!rs.next()) throw new ExceptionDBGitObjectNotFound("");
+
+ final String owner = rs.getString("owner");
+ final String procedureName = rs.getString("procedureName");
+ final String sql = rs.getString("ddl");
+ final StringProperties options = new StringProperties(rs);
+
+ return new DBProcedure(procedureName, options, schema, owner, Collections.emptySet(), sql);
+
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "prc").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
}
@Override
public Map getFunctions(String schema) {
- Map listFunction = new HashMap<>();
- try (Statement stmt = getConnection().createStatement()){
- String query =
- "SELECT ss.name schemaName, o.name functionName, type_desc typeName, definition ddl, USER_NAME(so.uid) owner \n" +
- "FROM sys.sql_modules m \n" +
- "INNER JOIN sys.objects o ON m.object_id = o.object_id\n" +
- "INNER JOIN sysobjects so ON m.object_id = so.id\n" +
- "INNER JOIN sys.schemas ss ON ss.schema_id = o.schema_id\n" +
- "WHERE type_desc like '%function%' AND ss.name = '" + schema + "'\n";
+ final Map<String, DBFunction> listFunction = new HashMap<>();
+ final String query =
+ "SELECT ss.name schemaName, o.name functionName, type_desc typeName, definition ddl, USER_NAME(so.uid) owner \n" +
+ "FROM sys.sql_modules m \n" +
+ "INNER JOIN sys.objects o ON m.object_id = o.object_id\n" +
+ "INNER JOIN sysobjects so ON m.object_id = so.id\n" +
+ "INNER JOIN sys.schemas ss ON ss.schema_id = o.schema_id\n" +
+ "WHERE type_desc like '%function%' AND ss.name = '" + schema + "'\n";
+
+ try (Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);){
- ResultSet rs = stmt.executeQuery(query);
while(rs.next()){
- String name = rs.getString("functionName");
- String owner = rs.getString("owner");
- DBFunction func = new DBFunction(name);
- func.setSchema(schema);
- func.setOwner(owner);
- rowToProperties(rs,func.getOptions());
+ final String name = rs.getString("functionName");
+ final String owner = rs.getString("owner");
+ final String sql = rs.getString("ddl");
+ final StringProperties options = new StringProperties(rs);
+
+ final DBFunction func = new DBFunction(name, options, schema, owner, Collections.emptySet(), sql);
listFunction.put(name, func);
}
- }catch(Exception e) {
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "fnc").toString(), e);
+
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "fnc").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
return listFunction;
}
@Override
public DBFunction getFunction(String schema, String name) {
- try (Statement stmt = getConnection().createStatement()){
- String query =
- "SELECT ss.name schemaName, o.name functionName, type_desc typeName, definition ddl, USER_NAME(so.uid) owner \n" +
- "FROM sys.sql_modules m \n" +
- "INNER JOIN sys.objects o ON m.object_id = o.object_id\n" +
- "INNER JOIN sysobjects so ON m.object_id = so.id\n" +
- "INNER JOIN sys.schemas ss ON ss.schema_id = o.schema_id\n" +
- "WHERE type_desc like '%function%' AND ss.name = '" + schema + "' AND o.name = '" + name + "'\n";
-
- DBFunction func = null;
- ResultSet rs = stmt.executeQuery(query);
- while (rs.next()) {
- func = new DBFunction(rs.getString("functionName"));
- String owner = rs.getString("owner");
- func.setSchema(schema);
- func.setOwner(owner);
- rowToProperties(rs,func.getOptions());
- }
- return func;
+ final String query =
+ "SELECT ss.name schemaName, o.name functionName, type_desc typeName, definition ddl, USER_NAME(so.uid) owner \n" +
+ "FROM sys.sql_modules m \n" +
+ "INNER JOIN sys.objects o ON m.object_id = o.object_id\n" +
+ "INNER JOIN sysobjects so ON m.object_id = so.id\n" +
+ "INNER JOIN sys.schemas ss ON ss.schema_id = o.schema_id\n" +
+ "WHERE type_desc like '%function%' AND ss.name = '" + schema + "' AND o.name = '" + name + "'\n";
- }catch(Exception e) {
- throw new ExceptionDBGitRunTime(lang.getValue("errors", "adapter", "fnc").toString(), e);
+ try (Statement stmt = getConnection().createStatement(); ResultSet rs = stmt.executeQuery(query);){
+
+ if (!rs.next()) throw new ExceptionDBGitObjectNotFound("");
+
+ final String functionName = rs.getString("functionName");
+ final String owner = rs.getString("owner");
+ final String sql = rs.getString("ddl");
+ final StringProperties options = new StringProperties(rs);
+
+ return new DBFunction(functionName, options, schema, owner, Collections.emptySet(), sql);
+
+ } catch(Exception e) {
+ final String msg = lang.getValue("errors", "adapter", "fnc").toString();
+ throw new ExceptionDBGitRunTime(msg, e);
}
}
@@ -793,134 +812,135 @@ public DBFunction getFunction(String schema, String name) {
// it is not possible to get definition of an encrypted trigger
public Map getTriggers(String schema) {
- Map listTrigger = new HashMap