
Commit

Remove splitting mechanism
sfc-gh-jvasquezrojas committed Feb 23, 2024
1 parent fa000cc commit c89ac0e
Showing 3 changed files with 34 additions and 213 deletions.
60 changes: 30 additions & 30 deletions .github/workflows/upload-release-assets.yml
@@ -32,70 +32,70 @@ jobs:
with:
files: DB2/
recursive: false
dest: db2${{ github.event.release.tag_name }}.zip
dest: db2.zip

- name: Build Hive Assests
uses: vimtor/action-zip@v1
with:
files: Hive/
recursive: false
dest: hive${{ github.event.release.tag_name }}.zip
dest: hive.zip

- name: Build Netezza Assests
uses: vimtor/action-zip@v1
with:
files: Netezza/
recursive: false
dest: netezza${{ github.event.release.tag_name }}.zip
dest: netezza.zip

- name: Build Oracle Assests
uses: vimtor/action-zip@v1
with:
files: Oracle/
recursive: false
dest: oracle${{ github.event.release.tag_name }}.zip
dest: oracle.zip

- name: Build Redshift Assests
uses: vimtor/action-zip@v1
with:
files: Redshift/
recursive: false
dest: redshift${{ github.event.release.tag_name }}.zip
dest: redshift.zip

- name: Build SQL Server Assests
uses: vimtor/action-zip@v1
with:
files: SQLServer/
recursive: false
dest: sql-server${{ github.event.release.tag_name }}.zip
dest: sql-server.zip

- name: Build Teradata Assests
uses: vimtor/action-zip@v1
with:
files: Teradata/
recursive: false
dest: teradata${{ github.event.release.tag_name }}.zip
dest: teradata.zip

- name: Build Vertica Assests
uses: vimtor/action-zip@v1
with:
files: Vertica/
recursive: false
dest: vertica${{ github.event.release.tag_name }}.zip
dest: vertica.zip

- name: Build BigQuery Assests
uses: vimtor/action-zip@v1
with:
files: BigQuery/
recursive: false
dest: bigquery${{ github.event.release.tag_name }}.zip
dest: bigquery.zip

- name: Build Databricks Assests
uses: vimtor/action-zip@v1
with:
files: Databricks/
recursive: true
dest: databricks${{ github.event.release.tag_name }}.zip
dest: databricks.zip

- name: Build AlternativeSQLServerExtractionMethods Assests
uses: vimtor/action-zip@v1
@@ -111,8 +111,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
asset_path: ./db2${{ github.event.release.tag_name }}.zip
asset_name: db2${{ github.event.release.tag_name }}.zip
asset_path: ./db2.zip
asset_name: db2.zip
asset_content_type: application/zip

- name: Upload Hive Release Asset
@@ -122,8 +122,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./hive${{ github.event.release.tag_name }}.zip
asset_name: hive${{ github.event.release.tag_name }}.zip
asset_path: ./hive.zip
asset_name: hive.zip
asset_content_type: application/zip

- name: Upload Netezza Release Asset
@@ -133,8 +133,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./netezza${{ github.event.release.tag_name }}.zip
asset_name: netezza${{ github.event.release.tag_name }}.zip
asset_path: ./netezza.zip
asset_name: netezza.zip
asset_content_type: application/zip

- name: Upload Oracle Release Asset
@@ -144,8 +144,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./oracle${{ github.event.release.tag_name }}.zip
asset_name: oracle${{ github.event.release.tag_name }}.zip
asset_path: ./oracle.zip
asset_name: oracle.zip
asset_content_type: application/zip

- name: Upload Redshift Release Asset
@@ -155,8 +155,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./redshift${{ github.event.release.tag_name }}.zip
asset_name: redshift${{ github.event.release.tag_name }}.zip
asset_path: ./redshift.zip
asset_name: redshift.zip
asset_content_type: application/zip

- name: Upload SQL Server Release Asset
@@ -166,8 +166,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./sql-server${{ github.event.release.tag_name }}.zip
asset_name: sql-server${{ github.event.release.tag_name }}.zip
asset_path: ./sql-server.zip
asset_name: sql-server.zip
asset_content_type: application/zip

- name: Upload Teradata Release Asset
@@ -177,8 +177,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./teradata${{ github.event.release.tag_name }}.zip
asset_name: teradata${{ github.event.release.tag_name }}.zip
asset_path: ./teradata.zip
asset_name: teradata.zip
asset_content_type: application/zip

- name: Upload Vertica Release Asset
@@ -188,8 +188,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./vertica${{ github.event.release.tag_name }}.zip
asset_name: vertica${{ github.event.release.tag_name }}.zip
asset_path: ./vertica.zip
asset_name: vertica.zip
asset_content_type: application/zip

- name: Upload BigQuery Release Asset
@@ -199,8 +199,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./bigquery${{ github.event.release.tag_name }}.zip
asset_name: bigquery${{ github.event.release.tag_name }}.zip
asset_path: ./bigquery.zip
asset_name: bigquery.zip
asset_content_type: application/zip

- name: Upload Databricks Release Asset
@@ -210,8 +210,8 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./databricks${{ github.event.release.tag_name }}.zip
asset_name: databricks${{ github.event.release.tag_name }}.zip
asset_path: ./databricks.zip
asset_name: databricks.zip
asset_content_type: application/zip

- name: Upload AlternativeSQLServerExtractionMethods Assest
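Note on the workflow change above: each platform archive now keeps a fixed name (for example teradata.zip) instead of embedding the release tag in the file name. Below is a minimal sketch of what that means for anyone downloading the assets, assuming the standard GitHub release download URL layout; OWNER, REPO, and v1.2.3 are hypothetical placeholders, not values taken from this repository.

# Illustrative only: OWNER/REPO and v1.2.3 are placeholders.
# Before this commit the asset was named teradata<tag>.zip, so its file name
# changed on every release; after it, the name is always teradata.zip.

# Download from a specific release...
curl -fL -o teradata.zip \
  "https://github.com/OWNER/REPO/releases/download/v1.2.3/teradata.zip"

# ...or from whichever release is currently latest, without knowing the tag.
curl -fL -o teradata.zip \
  "https://github.com/OWNER/REPO/releases/latest/download/teradata.zip"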
185 changes: 3 additions & 182 deletions Teradata/bin/create_ddls.sh
@@ -3,6 +3,7 @@
#Version 20211210: Fix error messages
#Version 20230811: Add command to copy the scripts from scripts_template.
#Version 20240201: Add spliting mechanism for output code.
#Version 20240223: Remove spliting mechanism for output code.

##### Modify the connection information
connection_string="dbc,dbc"
@@ -28,7 +29,6 @@ mkdir -p ../temp
mkdir -p ../output
mkdir -p ../output/object_extracts
mkdir -p ../output/object_extracts/DDL
mkdir -p ../output/object_extracts/Splits
cp -r ../scripts_template ../scripts
touch -- "../output/object_extracts/DDL/.sc_extracted"

@@ -59,186 +59,7 @@ echo 'Replacing unicode values...'
[[ ! -f ../output/object_extracts/DDL/DDL_Macros.sql ]] || sed -i -e "s|\U2013|-|g" -e "s|\U00D8|0|g" -e "s|\U00A0| |g" -e "s|\U1680| |g" -e "s|\U180E| |g" -e "s|\U2000| |g" -e "s|\U2001| |g" -e "s|\U2002| |g" -e "s|\U2003| |g" -e "s|\U2004| |g" -e "s|\U2005| |g" -e "s|\U2006| |g" -e "s|\U2007| |g" -e "s|\U2008| |g" -e "s|\U2009| |g" -e "s|\U200A| |g" -e "s|\U200B| |g" -e "s|\U202F| |g" -e "s|\U205F| |g" -e "s|\U3000| |g" -e "s|\UFEFF| |g" ../output/object_extracts/DDL/DDL_Macros.sql
[[ ! -f ../output/object_extracts/DDL/DDL_Procedures.sql ]] || sed -i -e "s|\U2013|-|g" -e "s|\U00D8|0|g" -e "s|\U00A0| |g" -e "s|\U1680| |g" -e "s|\U180E| |g" -e "s|\U2000| |g" -e "s|\U2001| |g" -e "s|\U2002| |g" -e "s|\U2003| |g" -e "s|\U2004| |g" -e "s|\U2005| |g" -e "s|\U2006| |g" -e "s|\U2007| |g" -e "s|\U2008| |g" -e "s|\U2009| |g" -e "s|\U200A| |g" -e "s|\U200B| |g" -e "s|\U202F| |g" -e "s|\U205F| |g" -e "s|\U3000| |g" -e "s|\UFEFF| |g" ../output/object_extracts/DDL/DDL_Procedures.sql


##SPLIT FILES AND ORGANIZE INTO DATABASES BY OBJECT TYPE

echo 'Create Database Folders...'
cp ../output/object_extracts/DDL/DDL_Databases.sql ../output/object_extracts/DDL/DDL_Databases2.sql
[[ ! -f ../output/object_extracts/DDL/DDL_Databases2.sql ]] || sed -i -e "s/CREATE DATABASE //g" -e "s|\sFROM.*||g" -e 's/.*/"&"/' ../output/object_extracts/DDL/DDL_Databases2.sql
cd ../output/object_extracts/
xargs mkdir -p < DDL/DDL_Databases2.sql
rm DDL/DDL_Databases2.sql

for dir in */; do
mkdir -- "$dir/table";
mkdir -- "$dir/view";
mkdir -- "$dir/joinindex";
mkdir -- "$dir/function";
mkdir -- "$dir/macro";
mkdir -- "$dir/procedure";
mkdir -- "$dir/trigger";
mkdir -- "$dir/schema";
mkdir -- "$dir/unknown";
done


echo 'Splitting...'

mkdir -p Splits/table
mkdir -p Splits/view
mkdir -p Splits/joinindex
mkdir -p Splits/function
mkdir -p Splits/macro
mkdir -p Splits/procedure
mkdir -p Splits/trigger
mkdir -p Splits/schema
mkdir -p Splits/unknown


echo '...Tables..'
cd Splits/table
SPLIT_TERM=sc-table
FILE=../../DDL/DDL_Tables.sql
csplit -f File_ -b "%07d.sql" -s $FILE /$SPLIT_TERM/ "{$(($(grep -c -- $SPLIT_TERM $FILE)-1))}"
rm File_0000000.sql

for file in File_*; do
FLNAME=$(grep -o -P '.+?(?= <\/sc)' $file | cut -c 15-)
DBNAME=$(grep -o -P '(?<=<sc-table> )(.*?)(?=\..* </sc-table>)' $file)
FLNAME=${FLNAME/$DBNAME\./}
mkdir -p ../../table/"$DBNAME"
mv $file ../../table/"$DBNAME"/"$FLNAME.sql"
done



echo '...Views..'
cd ../view
SPLIT_TERM=sc-view
FILE=../../DDL/DDL_Views.sql
csplit -f File_ -b "%07d.sql" -s $FILE /$SPLIT_TERM/ "{$(($(grep -c -- $SPLIT_TERM $FILE)-1))}"
rm File_0000000.sql

for file in File_*; do
FLNAME=$(grep -o -P '.+?(?= <\/sc)' $file | cut -c 14-)
DBNAME=$(grep -o -P '(?<=<sc-view> )(.*?)(?=\..* </sc-view>)' $file)
FLNAME=${FLNAME/$DBNAME\./}
mkdir -p ../../view/"$DBNAME"
mv $file ../../view/"$DBNAME"/"$FLNAME.sql"
done



echo '...Join Indexes..'
cd ../joinindex
SPLIT_TERM=sc-joinindex
FILE=../../DDL/DDL_Join_Indexes.sql
csplit -f File_ -b "%07d.sql" -s $FILE /$SPLIT_TERM/ "{$(($(grep -c -- $SPLIT_TERM $FILE)-1))}"
rm File_0000000.sql

for file in File_*; do
FLNAME=$(grep -o -P '.+?(?= <\/sc)' $file | cut -c 19-)
DBNAME=$(grep -o -P '(?<=<sc-joinindex> )(.*?)(?=\..* </sc-joinindex>)' $file)
FLNAME=${FLNAME/$DBNAME\./}
mkdir -p ../../joinindex/"$DBNAME"
mv $file ../../joinindex/"$DBNAME"/"$FLNAME.sql"
done



echo '...Functions..'
cd ../function
SPLIT_TERM=sc-function
FILE=../../DDL/DDL_Functions.sql
csplit -f File_ -b "%07d.sql" -s $FILE /$SPLIT_TERM/ "{$(($(grep -c -- $SPLIT_TERM $FILE)-1))}"
rm File_0000000.sql

for file in File_*; do
FLNAME=$(grep -o -P '.+?(?= <\/sc)' $file | cut -c 18-)
DBNAME=$(grep -o -P '(?<=<sc-function> )(.*?)(?=\..* </sc-function>)' $file)
FLNAME=${FLNAME/$DBNAME\./}
mkdir -p ../../function/"$DBNAME"
mv $file ../../function/"$DBNAME"/"$FLNAME.sql"
done



echo '...Macros..'
cd ../macro
SPLIT_TERM=sc-macro
FILE=../../DDL/DDL_Macros.sql
csplit -f File_ -b "%07d.sql" -s $FILE /$SPLIT_TERM/ "{$(($(grep -c -- $SPLIT_TERM $FILE)-1))}"
rm File_0000000.sql

for file in File_*; do
FLNAME=$(grep -o -P '.+?(?= <\/sc)' $file | cut -c 15-)
DBNAME=$(grep -o -P '(?<=<sc-macro> )(.*?)(?=\..* </sc-macro>)' $file)
FLNAME=${FLNAME/$DBNAME\./}
mkdir -p ../../macro/"$DBNAME"
mv $file ../../macro/"$DBNAME"/"$FLNAME.sql"
done



echo '...Procedures..'
cd ../procedure
SPLIT_TERM=sc-procedure
FILE=../../DDL/DDL_Procedures.sql
csplit -f File_ -b "%07d.sql" -s $FILE /$SPLIT_TERM/ "{$(($(grep -c -- $SPLIT_TERM $FILE)-1))}"
rm File_0000000.sql

for file in File_*; do
FLNAME=$(grep -o -P '.+?(?= <\/sc)' $file | cut -c 19-)
DBNAME=$(grep -o -P '(?<=<sc-procedure> )(.*?)(?=\..* </sc-procedure>)' $file)
FLNAME=${FLNAME/$DBNAME\./}
mkdir -p ../../procedure/"$DBNAME"
mv $file ../../procedure/"$DBNAME"/"$FLNAME.sql"
done



echo '...Triggers..'
cd ../trigger
SPLIT_TERM=sc-trigger
FILE=../../DDL/DDL_Trigger.sql
csplit -f File_ -b "%07d.sql" -s $FILE /$SPLIT_TERM/ "{$(($(grep -c -- $SPLIT_TERM $FILE)-1))}"
rm File_0000000.sql

for file in File_*; do
FLNAME=$(grep -o -P '.+?(?= <\/sc)' $file | cut -c 17-)
DBNAME=$(grep -o -P '(?<=trigger> )(.*?)(?=\..* </sc-trigger>)' $file)
FLNAME=${FLNAME/$DBNAME\./}
mkdir -p ../../trigger/"$DBNAME"
mv $file ../../trigger/"$DBNAME"/"$FLNAME.sql"
done

echo '...Schemas..'
cd ../schema
SPLIT_TERM=sc-schema
FILE=../../DDL/DDL_SF_Schemas.sql
csplit -f File_ -b "%07d.sql" -s $FILE /$SPLIT_TERM/ "{$(($(grep -c -- $SPLIT_TERM $FILE)-1))}"
rm File_0000000.sql

for file in File_*; do
FLNAME=$(grep -o -P '.+?(?= <\/sc)' $file | cut -c 15-)

mkdir -p ../../schema/NO_SCHEMA
mv $file ../../schema/NO_SCHEMA/"$FLNAME.sql"
done

echo '...Cleaning Up Files'

cd ../../../../bin
mkdir -p ../output/object_extracts/unknown/NO_SCHEMA
mv ../output/object_extracts/DDL/DDL_Databases.sql ../output/object_extracts/unknown/NO_SCHEMA/DDL_Databases.sql
rm -r ../output/object_extracts/DDL
rm -r ../output/object_extracts/Splits
rm -r ../temp
rm -r ../scripts

cd ../output/object_extracts
find . -type d -empty -delete

echo '...DDL Creation Complete'


rm -r ../temp
rm -r ../scripts
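For readers skimming the deletion above: the removed block repeated the same csplit pattern once per object type (table, view, joinindex, function, macro, procedure, trigger, schema). The sketch below is a condensed, illustrative reconstruction of that pattern for the table case only, not the exact removed script; after this commit the combined DDL_*.sql files are simply left in ../output/object_extracts/DDL.

#!/usr/bin/env bash
# Illustrative sketch of the removed splitting step (table case only).
# The combined dump is cut apart at each "<sc-table>" marker line and every
# piece is filed under table/<database>/<object>.sql.

FILE=DDL_Tables.sql
MARKER=sc-table

# One fragment per marker occurrence; the fragment before the first marker is discarded.
csplit -f File_ -b "%07d.sql" -s "$FILE" "/$MARKER/" \
  "{$(($(grep -c -- "$MARKER" "$FILE") - 1))}"
rm -f File_0000000.sql

for piece in File_*; do
  # Each piece starts with a header such as: <sc-table> MYDB.MYTABLE </sc-table>
  dbname=$(grep -o -P "(?<=<$MARKER> )(.*?)(?=\..* </$MARKER>)" "$piece")
  objname=$(grep -o -P "(?<=<$MARKER> $dbname\.)(.*?)(?= </$MARKER>)" "$piece")
  mkdir -p "table/$dbname"
  mv "$piece" "table/$dbname/$objname.sql"
done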
