Skip to content

Commit

Permalink
moving some folders
Browse files Browse the repository at this point in the history
Former-commit-id: 60b89d5156478d5da1a169e670990a09cd46bf59 [formerly aa488bbef2861638d69d359c627749a5aa92efca]
Former-commit-id: 506fb56 [formerly 2b80acb4b64f3afde6d39a3abd133c3b034426bb]
Former-commit-id: 85b394b6ec5ce7c137746e03894b263f46bd83ee
  • Loading branch information
mobilize-mrojas committed Jun 25, 2021
1 parent 5f42f88 commit 19b8706
Show file tree
Hide file tree
Showing 191 changed files with 2,231 additions and 106 deletions.
10 changes: 5 additions & 5 deletions .env
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,11 @@ SNOW_ACCOUNT=SnowAccount
SNOW_WAREHOUSE=datawarehouseName
SNOW_DATABASE=DatabaseName
SNOW_ROLE=Rolename
OUT_FOLDER=/workspace/snowconvertdataexportscripts/VerticaLab/Scripts/SQL_Convert/sqls
SUCCESS_FOLDER=/workspace/snowconvertdataexportscripts/VerticaLab/Scripts/SQL_Convert/success
FAILED_FOLDER=/workspace/snowconvertdataexportscripts/VerticaLab/Scripts/SQL_Convert/failed
FORMATTED_FOLDER=/workspace/snowconvertdataexportscripts/VerticaLab/Scripts/SQL_Convert/formatted
LOG_FOLDER=/workspace/snowconvertdataexportscripts/VerticaLab/Scripts/SQL_Convert/log
OUT_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/sqls
SUCCESS_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/success
FAILED_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/failed
FORMATTED_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/formatted
LOG_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/log
ACTION=SQL
MAX_THREADS=15
STOP_AFTER=1000000
Expand Down
37 changes: 0 additions & 37 deletions .snowstudio-ci.yml

This file was deleted.

File renamed without changes.
File renamed without changes.
21 changes: 21 additions & 0 deletions Redshift/Code/execute_bash.bash
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@

# Redshift extraction driver: clears both output directories, runs the R
# extraction scripts, and converts the generated SQL to Snowflake dialect.
# NOTE: all paths are relative to this script's own directory.

# Clear output dir.
# Guard the cd: if it failed, `rm -rf *` would wipe the CURRENT directory.
pushd .
cd ../../Redshift/output || exit 1
rm -rf *
popd

# Clear output_snowflake dir (same guard as above)
pushd .
cd ../../Redshift/output_snowflake || exit 1
rm -rf *
popd

# run extract
R < extract_redshift_tables_from_CSV.r --no-save
# convert to Snowflake
python COM-ES-Scripts-sql2sf.py ../output ../output_snowflake
# run extract again, then assemble the executable SQL files
R < extract_redshift_tables_from_CSV.r --no-save
R < Assembly_exec_sqls.r --no-save

# Remove the generated schemas file from the Snowflake output
# (presumably superseded elsewhere -- TODO confirm with script owner)
cd ../../Redshift/output_snowflake || exit 1
rm schemas.sql
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Script Overview

The script originally was intended to migrate DDL from a bunch of database engines but not specifically for Redshift.
The script originally was intended to migrate DDL from a bunch of database engines but not specifically for Redshift.

## Scripts issues detected:

Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
11 changes: 0 additions & 11 deletions RedshiftMigrationLab/Code/execute_bash.bash

This file was deleted.

Binary file added Teradata/.DS_Store
Binary file not shown.
92 changes: 92 additions & 0 deletions Teradata/READ_ME.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
# Teradata DDL Extraction Scripts for SnowConvert

STEPS TO EXECUTE DDL CODE GENERATION

**Release 20201111**

**Should be executed in bash shell on a linux environment with access to bteq/tpt utilities.**

1 - Modify ***create_ddls.sh*** in the bin folder – Using a text editor modify the following parameters:

‘connection_string’

‘include_databases’

‘exclude_databases’

‘include_objects’

It is recommended to use the user ‘DBC’ in the connection string, but a user with sysadmin privileges should also work. Please run on a production-like environment with up-to-date statistics.

By default the script is setup to exclude system related databases and include all others. You can modify these to get the desired scope, including the operator that is used. Statements need to exclude spaces in the parameter values and values should be all UPPERCASE. Do not remove the parentheses around the entire statement which are needed for compound logic. Do not use LIKE ANY clause for both as it can cause unexpected issues. Example values:

“(UPPER(T1.DATABASENAME) NOT IN (‘ALL’, ‘TESTDB’))”

“((UPPER(T1.DATABASENAME) NOT IN (‘ALL’, ‘TESTDB’)) AND UPPER(T1.DATABASENAME) NOT LIKE (‘TD_%’))”


2 - After modifying, the ‘*create_ddls.sh’* file can be run from the command line to execute the extract from within the bin directory. The following files will be created in the output folder:

DDL Files - These files will contain the definitions of the objects specified by the file name.

• DDL_Databases.sql

• DDL_Tables.sql

• DDL_Join_Indexes.sql

• DDL_Functions.sql

• DDL_Views.sql

• DDL_Macros.sql

• DDL_Procedures.sql

• insert_statements.sql (these are 2 dummy records created for each Teradata Table - NOT CUSTOMER DATA)

Report Files - These files provide information around key system statistics and objects that can have a specific impact on conversion and migration activities.

• Object_Type_List.txt

• Object_Type_Summary.txt

• Table_List.txt

• Special_Columns_List.txt

• All_Stats.txt

• Table_Stats.txt

• View_Dependency_Detail.txt

• View_Dependency_Report.txt

• Object_Join_Indexes.txt

Usage Report Files - These files provide information relevant to the sizing and usage of the Teradata system. These will not be created unless you uncomment the section for “Creating Usage Reports”

• 90_Day_CPU_Stats.txt

• 90_Day_Node_Stats.txt

• 90_Day_Workload_Stats.txt

Data Profiling Files - These collect information about certain column types in which information about the data is required to understand certain aspects of the migration.

• Data_Profile_Numbers.txt

Invalid Objects Log - This file returns results showing any test failures for views that are not valid.

• invalid_objects.log

TPT Script Files - These files contain auto-generated scripts which can later be used in the data migration process.

• tpt_export_single_script.tpt

• tpt_export_multiple_scripts.tpt

• tables_not_in_tpt_scripts.txt

3 - After a successful run, remove logon information from the top line of each of the files in the scripts folder as well as the ‘*create_ddls.sh’* file. Compress the entire ‘Teradata Source Extract’ and return to Snowflake. Please do not modify or remove any files so that we can review logs as needed.
Binary file added Teradata/scripts/.DS_Store
Binary file not shown.
90 changes: 90 additions & 0 deletions Teradata/scripts/create_ddls.btq
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
.LOGON connection_string;

/* Teradata DDL extraction (BTEQ).  Each section below uses the same
   two-step pattern:
     1) .EXPORT a generated file of "SHOW <object>" statements into ../temp,
     2) .EXPORT to the final DDL output file and .RUN the generated file so
        the SHOW output is captured there.
   NOTE(review): connection_string, include_databases, exclude_databases and
   include_objects are placeholder tokens -- per READ_ME.md they are edited
   into create_ddls.sh before execution; confirm the substitution mechanism. */

**** CREATE TABLES FILE ****
/* One "SHOW TABLE db.name;" (preceded by an "<sc-table>" marker comment)
   per DBC.TABLESV row with TABLEKIND in ('T','O','Q'). */
.EXPORT FILE = ../temp/SHOW_Tables.sql
.SET WIDTH 65531
SELECT 'SELECT ''/* <sc-table> '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' </sc-table> */'' as "--"; ' || 'SHOW TABLE ' || TRIM(T1.DATABASENAME) || '.' ||TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('T','O','Q') AND include_databases AND exclude_databases AND include_objects GROUP BY 1;
.EXPORT RESET
/* Step 2: remove any stale output, re-point EXPORT, run generated SHOWs */
.OS rm ../output/object_extracts/DDL/DDL_Tables.sql
.EXPORT FILE = ../output/object_extracts/DDL/DDL_Tables.sql
.SET WIDTH 65531
.RUN FILE = ../temp/SHOW_Tables.sql
.EXPORT RESET


**** CREATE JOIN INDEXES FILE ****
/* Join indexes: DBC.TABLESV rows with TABLEKIND 'I' */
.EXPORT FILE = ../temp/SHOW_Join_Indexes.sql
.SET WIDTH 65531
SELECT 'SELECT ''/* <sc-joinindex> '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' </sc-joinindex> */'' as "--"; ' || 'SHOW JOIN INDEX ' || TRIM(T1.DATABASENAME) || '.' ||TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('I') AND include_databases AND exclude_databases AND include_objects GROUP BY 1;
.EXPORT RESET
.OS rm ../output/object_extracts/DDL/DDL_Join_Indexes.sql
.EXPORT FILE = ../output/object_extracts/DDL/DDL_Join_Indexes.sql
.SET WIDTH 65531
.RUN FILE = ../temp/SHOW_Join_Indexes.sql
.EXPORT RESET


**** CREATE VIEWS FILE ****
/* Views: DBC.TABLESV rows with TABLEKIND 'V' */
.EXPORT FILE = ../temp/SHOW_Views.sql
.SET WIDTH 65531
SELECT 'SELECT ''/* <sc-view> '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' </sc-view> */'' as "--"; ' || 'SHOW VIEW ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'V' AND include_databases AND exclude_databases AND include_objects GROUP BY 1;
.EXPORT RESET
.OS rm ../output/object_extracts/DDL/DDL_Views.sql
.EXPORT FILE = ../output/object_extracts/DDL/DDL_Views.sql
.SET WIDTH 65531
.RUN FILE = ../temp/SHOW_Views.sql
.EXPORT RESET

**** CREATE FUNCTIONS FILE ****
/* One "SHOW FUNCTION db.name;" per DBC.FUNCTIONSV row.
   FIX: the generated marker now closes with "</sc-function>" -- the
   original emitted "<sc-function>" twice (missing slash), inconsistent
   with the </sc-table>/</sc-view>/</sc-macro> markers emitted elsewhere,
   which would break any consumer matching open/close marker pairs. */
.EXPORT FILE = ../temp/SHOW_Functions.sql
.SET WIDTH 65531
SELECT 'SELECT ''/* <sc-function> '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.SpecificNAME) || ' </sc-function> */'' as "--"; ' || 'SHOW FUNCTION ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.FUNCTIONNAME) || ';' "--"
FROM DBC.FUNCTIONSV T1 WHERE include_databases AND exclude_databases GROUP BY 1;
.EXPORT RESET
.OS rm ../output/object_extracts/DDL/DDL_Functions.sql
.EXPORT FILE = ../output/object_extracts/DDL/DDL_Functions.sql
.SET WIDTH 65531
.RUN FILE = ../temp/SHOW_Functions.sql
.EXPORT RESET

**** CREATE MACROS FILE ****
/* Macros: DBC.TABLESV rows with TABLEKIND 'M'.  Same two-step pattern:
   generate SHOW statements into ../temp, then .RUN them with EXPORT
   pointed at the final DDL file. */
.EXPORT FILE = ../temp/SHOW_Macros.sql
.SET WIDTH 65531
SELECT 'SELECT ''/* <sc-macro> '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' </sc-macro> */'' as "--"; ' || 'SHOW MACRO ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'M' AND include_databases AND exclude_databases AND include_objects GROUP BY 1;
.EXPORT RESET
.OS rm ../output/object_extracts/DDL/DDL_Macros.sql
.EXPORT FILE = ../output/object_extracts/DDL/DDL_Macros.sql
.SET WIDTH 65531
.RUN FILE = ../temp/SHOW_Macros.sql
.EXPORT RESET


**** CREATE PROCEDURES FILE ****
/* Stored procedures: DBC.TABLESV rows with TABLEKIND 'P' */
.EXPORT FILE = ../temp/SHOW_Procedures.sql
.SET WIDTH 65531
SELECT 'SELECT ''/* <sc-procedure> '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' </sc-procedure> */'' as "--"; ' || 'SHOW PROCEDURE ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'P' AND include_databases AND exclude_databases AND include_objects GROUP BY 1;
.EXPORT RESET
.OS rm ../output/object_extracts/DDL/DDL_Procedures.sql
.EXPORT FILE = ../output/object_extracts/DDL/DDL_Procedures.sql
.SET WIDTH 65531
.RUN FILE = ../temp/SHOW_Procedures.sql
.EXPORT RESET


**** CREATE DATABASES FILE ****
/* Single-step: writes "CREATE DATABASE ... FROM DBC AS PERM = 100000000;"
   per DBC.DATABASESV row directly to the output (no SHOW round-trip).
   NOTE(review): PERM 100000000 is a fixed placeholder size -- confirm. */
.OS rm ../output/object_extracts/DDL/DDL_Databases.sql
.EXPORT FILE = ../output/object_extracts/DDL/DDL_Databases.sql
.SET WIDTH 65531
SELECT 'CREATE DATABASE ' || TRIM(T1.DATABASENAME) || ' FROM DBC AS PERM = 100000000;' "--" FROM DBC.DATABASESV T1 WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1;
.EXPORT RESET


**** CREATE SNOWFLAKE SCHEMA FILE ****
/* Writes one "CREATE SCHEMA db;" per DBC.DATABASESV row, each prefixed
   with an <sc-schema> marker comment.
   FIX: a space is now emitted before "</sc-schema>" so the marker reads
   "<sc-schema> NAME </sc-schema>", consistent with every other marker
   this script generates (previously "NAME</sc-schema>"). */
.OS rm ../output/object_extracts/DDL/DDL_SF_Schemas.sql
.EXPORT FILE = ../output/object_extracts/DDL/DDL_SF_Schemas.sql
.SET WIDTH 65531
SELECT '/* <sc-schema> ' || TRIM(T1.DATABASENAME) || ' </sc-schema> */ ' || 'CREATE SCHEMA ' || TRIM(T1.DATABASENAME) || ';' "--" FROM DBC.DATABASESV T1 WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1;
.EXPORT RESET


.quit 0;
Loading

0 comments on commit 19b8706

Please sign in to comment.