September 6, 2023 at 7:19 pm
Please assist. I have developed an ETL using the code below to move data from the source to the destination.
The source table did not have a primary key or secondary keys with unique values, so I brought it in like this:
-- Table: system.IMETA_ZTRB_MP$F_ZTBR_TA_BW
-- DROP TABLE IF EXISTS system."IMETA_ZTRB_MP$F_ZTBR_TA_BW";
CREATE TABLE IF NOT EXISTS system."IMETA_ZTRB_MP$F_ZTBR_TA_BW"
(
"ZTBR_TransactionCode" integer NOT NULL DEFAULT nextval('system."ZTBR_Query_Destination_ZTBR_TransactionCode_seq"'::regclass),
"Company_Code" character varying(4) COLLATE pg_catalog."default",
"Posting_Period" character varying(7) COLLATE pg_catalog."default",
"Fiscal_Year" character varying(4) COLLATE pg_catalog."default",
"Profit_Center" character varying(255) COLLATE pg_catalog."default",
"Account_Number" character varying(255) COLLATE pg_catalog."default",
"Business_Process" character varying(255) COLLATE pg_catalog."default",
"Cost_Center" character varying(10) COLLATE pg_catalog."default",
"Internal_Order" character varying(255) COLLATE pg_catalog."default",
"Trading_Partner" character varying(255) COLLATE pg_catalog."default",
"Amount_in_company_code_currency" numeric(17,2),
"Company_code_currency" character varying(5) COLLATE pg_catalog."default",
"BRACS_FA" character varying(255) COLLATE pg_catalog."default",
"Expense_Type" character varying(255) COLLATE pg_catalog."default",
"BRACS_ACCT_Key" character varying(255) COLLATE pg_catalog."default",
"CC_Direct" character varying(255) COLLATE pg_catalog."default",
"Segment_PC" character varying(255) COLLATE pg_catalog."default",
"CC_Master_FA" character varying(255) COLLATE pg_catalog."default",
"Master_BRACS_Secondary_Key" integer,
"Source_Description_Secondary_Key" integer,
"Direct_Indirect_Secondary_Key" integer,
"Entity_Secondary_Key" integer,
"Region_Secondary_Key" integer,
"RowInsertedTimestamp" date DEFAULT CURRENT_DATE,
"RowUpdatedTimestamp" timestamp DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "ZTBR_Query_Destination_pkey" PRIMARY KEY ("ZTBR_TransactionCode"),
CONSTRAINT fk_direct_indirect FOREIGN KEY ("Direct_Indirect_Secondary_Key")
REFERENCES dim."IMETA_Direct_Indirect_Mapping" ("Direct_Secondary_Key") MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION,
CONSTRAINT fk_entity FOREIGN KEY ("Entity_Secondary_Key")
REFERENCES dim."IMETA_Entity_Mapping" ("Entity_Secondary_Key") MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION,
CONSTRAINT fk_master_bracs FOREIGN KEY ("Master_BRACS_Secondary_Key")
REFERENCES dim."IMETA_Master_BRACS_to_SAP_Data_TA_BR_" ("ZTBR_TransactionCode") MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION,
CONSTRAINT fk_source_description FOREIGN KEY ("Source_Description_Secondary_Key")
REFERENCES dim."IMETA_Source_Description_Mapping" ("BRACS_Key") MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
)
TABLESPACE pg_default;
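For reference, the DEFAULT on "ZTBR_TransactionCode" assumes the sequence it names already exists. A minimal sketch of creating (or resetting) it; the START/RESTART value of 1 is just an assumption here:
CREATE SEQUENCE IF NOT EXISTS system."ZTBR_Query_Destination_ZTBR_TransactionCode_seq" START WITH 1;
-- If the sequence already exists and should hand out IDs from 1 again:
ALTER SEQUENCE system."ZTBR_Query_Destination_ZTBR_TransactionCode_seq" RESTART WITH 1;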
I want to use SSIS to populate the following columns with values starting from 1:
"Master_BRACS_Secondary_Key" integer,
"Source_Description_Secondary_Key" integer,
"Direct_Indirect_Secondary_Key" integer,
"Entity_Secondary_Key" integer,
"Region_Secondary_Key" integer,
These columns contain NULLs, and when I try to amend them so that the values start from 1, my ETL fails because of a secondary key violation.
The insert below produces no data in the table because the secondary key columns contain NULLs.
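As a quick check of how many rows are affected (table and column names as in the DDL above), something like this works in PostgreSQL:
-- Count NULLs per secondary key column
SELECT
    count(*) FILTER (WHERE "Master_BRACS_Secondary_Key" IS NULL)       AS null_master_bracs,
    count(*) FILTER (WHERE "Source_Description_Secondary_Key" IS NULL) AS null_source_description,
    count(*) FILTER (WHERE "Direct_Indirect_Secondary_Key" IS NULL)    AS null_direct_indirect,
    count(*) FILTER (WHERE "Entity_Secondary_Key" IS NULL)             AS null_entity,
    count(*) FILTER (WHERE "Region_Secondary_Key" IS NULL)             AS null_region
FROM system."IMETA_ZTRB_MP$F_ZTBR_TA_BW";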
-- Check if the table already exists
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM information_schema.tables
WHERE table_schema = 'model'
AND table_name = 'IMETA_ZTBR_BRACS_Model_TA_BW'
) THEN
-- Create the table
CREATE TABLE model."IMETA_ZTBR_BRACS_Model_TA_BW" AS
SELECT
ZTBR."Company_Code",
CAST(ZTBR."Posting_Period" AS INTEGER) AS "Posting_Period_Cast",
ZTBR."Fiscal_Year",
ZTBR."Profit_Center",
ZTBR."Account_Number",
ZTBR."Business_Process",
ZTBR."Cost_Center",
ZTBR."Internal_Order",
ZTBR."Trading_Partner",
ZTBR."Amount_in_company_code_currency",
ZTBR."Company_code_currency",
ZTBR."BRACS_FA",
ZTBR."Expense_Type",
ZTBR."BRACS_ACCT_Key",
ZTBR."CC_Direct",
ZTBR."Segment_PC",
ZTBR."CC_Master_FA",
DIM_REGION_MAP."CoCd",
DIM_REGION_MAP."Sub Region" AS "DIM_Sub_Region",
DIM_REGION_MAP."Region" AS "DIM_Region",
DIM_REGION_MAP."BRACS Entity" AS "DIM_BRACS_Entity",
DIM_REGION_MAP."Consul",
DIM_REGION_MAP."Report",
DIM_REGION_MAP."Region BRACS" AS "DIM_Region_BRACS",
DIM_REGION_MAP."Group",
DIM_REGION_MAP."Group BRACS" AS "DIM_Group_BRACS",
DIM_REGION_MAP."J",
DIM_REGION_MAP."K",
DIM_REGION_MAP."L",
DIM_REGION_MAP."M",
DIM_REGION_MAP."N",
DIM_ENT_MAP."Entity",
DIM_ENT_MAP."Entity Name" AS "DIM_Entity_Name",
DIM_ENT_MAP."Entity Level" AS "DIM_Entity_Level",
DIM_ENT_MAP."Level 1" AS "DIM_Level_1",
DIM_ENT_MAP."Level 2" AS "DIM_Level_2",
DIM_ENT_MAP."Level 3" AS "DIM_Level_3",
DIM_ENT_MAP."Level 4" AS "DIM_Level_4",
DIM_ENT_MAP."Level 5" AS "DIM_Level_5",
DIM_ENT_MAP."Level 6" AS "DIM_Level_6",
DIM_DIR_IND."CLASSIFICATION" AS "CLASSIFICATION",
DIM_SRC_DESC."BRACS_Key" AS "DIM_SRC_DESC_BRACS_Key",
DIM_SRC_DESC."BRACSFA",
DIM_SRC_DESC."Function" AS "Function",
DIM_MASTER_BRACS."ZTBR_TransactionCode" AS "ZTBR_TransactionCode",
DIM_MASTER_BRACS."Acct Type" AS "Acct Type",
DIM_MASTER_BRACS."Level 1" AS "Master_Level_1",
DIM_MASTER_BRACS."Level 2" AS "Master_Level_2",
DIM_MASTER_BRACS."Level 3" AS "Master_Level_3",
DIM_MASTER_BRACS."GCoA" AS "GCoA",
DIM_MASTER_BRACS."EXPENSE FLAG" AS "EXPENSE FLAG",
DIM_MASTER_BRACS."BRACS" AS "BRACS",
DIM_MASTER_BRACS."BRACS_DESC" AS "BRACS_DESC",
DIM_MASTER_BRACS."Loaddate" AS "Loaddate",
CURRENT_DATE AS "RowInsertedTimestamp",
CURRENT_TIMESTAMP AS "RowUpdatedTimestamp",
DIM_DIR_IND."Direct_Primary_Key" AS "Direct_Primary_Key",
DIM_ENT_MAP."Entity_ID" AS "Entity_ID",
DIM_MASTER_BRACS."ZTBR_TransactionCode" AS "Master_ZTBR_TransactionCode",
DIM_SRC_DESC."BRACS_Key" AS "DIM_SRC_DESC_BRACS_Key_2",
DIM_REGION_MAP."Region_Primary_Key" AS "Region_Primary_Key"
FROM
system."IMETA_ZTRB_MP$F_ZTBR_TA_BW" AS ZTBR
JOIN
dim."IMETA_Direct_Indirect_Mapping" AS DIM_DIR_IND ON ZTBR."Direct_Indirect_Secondary_Key" = DIM_DIR_IND."Direct_Secondary_Key"
JOIN
dim."IMETA_Entity_Mapping" AS DIM_ENT_MAP ON ZTBR."Entity_Secondary_Key" = DIM_ENT_MAP."Entity_Secondary_Key"
JOIN
dim."IMETA_Master_BRACS_to_SAP_Data_TA_BR_" AS DIM_MASTER_BRACS ON ZTBR."Master_BRACS_Secondary_Key" = DIM_MASTER_BRACS."ZTBR_TransactionCode"
JOIN
dim."IMETA_Source_Description_Mapping" AS DIM_SRC_DESC ON ZTBR."Source_Description_Secondary_Key" = DIM_SRC_DESC."BRACS_Key"
JOIN
dim."IMETA_Region_Mapping" AS DIM_REGION_MAP ON ZTBR."Region_Secondary_Key" = DIM_REGION_MAP."Region_Primary_Key";
END IF;
END $$;
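To illustrate why nothing lands in the model table: the inner joins above discard every source row whose secondary key is NULL. A LEFT JOIN variant of the same FROM clause (sketch only, join clauses shown with the same aliases as above) would keep those rows, with NULL dimension values, instead:
FROM
system."IMETA_ZTRB_MP$F_ZTBR_TA_BW" AS ZTBR
LEFT JOIN
dim."IMETA_Direct_Indirect_Mapping" AS DIM_DIR_IND ON ZTBR."Direct_Indirect_Secondary_Key" = DIM_DIR_IND."Direct_Secondary_Key"
LEFT JOIN
dim."IMETA_Entity_Mapping" AS DIM_ENT_MAP ON ZTBR."Entity_Secondary_Key" = DIM_ENT_MAP."Entity_Secondary_Key"
LEFT JOIN
dim."IMETA_Master_BRACS_to_SAP_Data_TA_BR_" AS DIM_MASTER_BRACS ON ZTBR."Master_BRACS_Secondary_Key" = DIM_MASTER_BRACS."ZTBR_TransactionCode"
LEFT JOIN
dim."IMETA_Source_Description_Mapping" AS DIM_SRC_DESC ON ZTBR."Source_Description_Secondary_Key" = DIM_SRC_DESC."BRACS_Key"
LEFT JOIN
dim."IMETA_Region_Mapping" AS DIM_REGION_MAP ON ZTBR."Region_Secondary_Key" = DIM_REGION_MAP."Region_Primary_Key";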
I'm trying a Script Component, but I'm getting errors:
using System;
using System.Data;
using Microsoft.SqlServer.Dts.Pipeline.Wrapper;
using Microsoft.SqlServer.Dts.Runtime.Wrapper;

[Microsoft.SqlServer.Dts.Pipeline.SSISScriptComponentEntryPointAttribute]
public class ScriptMain : UserComponent
{
    // Declare counter variables at the class level
    int rowNumber_Master_BRACS = 0;
    int rowNumber_Source_Description = 0;
    int rowNumber_Direct_Indirect = 0;
    int rowNumber_Entity = 0;
    int rowNumber_Region = 0;

    public override void PreExecute()
    {
        base.PreExecute();
        // Initialize counter variables to start from 1
        rowNumber_Master_BRACS = 0;
        rowNumber_Source_Description = 0;
        rowNumber_Direct_Indirect = 0;
        rowNumber_Entity = 0;
        rowNumber_Region = 0;
    }

    public override void Input0_ProcessInputRow(Input0Buffer Row)
    {
        // Make sure to add these columns in your Script Component's Output Columns setting
        Row.AddRow();
        Row.Master_BRACS_Secondary_Key = ++rowNumber_Master_BRACS;
        Row.Source_Description_Secondary_Key = ++rowNumber_Source_Description;
        Row.Direct_Indirect_Secondary_Key = ++rowNumber_Direct_Indirect;
        Row.Entity_Secondary_Key = ++rowNumber_Entity;
        Row.Region_Secondary_Key = ++rowNumber_Region;
    }

    public override void PostExecute()
    {
        base.PostExecute();
    }
}
September 7, 2023 at 8:10 am
Ideally, all of your DDL should be happening outside of SSIS. Can't tell whether this is the case here.
Use derived columns within your SSIS data flow to populate the non-nullable columns. Or modify your source query (depending on the source).
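For example, if the source query runs against the PostgreSQL table above, the numbering the Script Component attempts could instead be done in the source query itself. This is only a sketch; whether a plain row number is the right value for those keys depends on your mapping logic, and the "New_" column names are made up here:
SELECT
    z.*,
    ROW_NUMBER() OVER (ORDER BY z."ZTBR_TransactionCode") AS "New_Master_BRACS_Secondary_Key",
    ROW_NUMBER() OVER (ORDER BY z."ZTBR_TransactionCode") AS "New_Source_Description_Secondary_Key",
    ROW_NUMBER() OVER (ORDER BY z."ZTBR_TransactionCode") AS "New_Direct_Indirect_Secondary_Key",
    ROW_NUMBER() OVER (ORDER BY z."ZTBR_TransactionCode") AS "New_Entity_Secondary_Key",
    ROW_NUMBER() OVER (ORDER BY z."ZTBR_TransactionCode") AS "New_Region_Secondary_Key"
FROM system."IMETA_ZTRB_MP$F_ZTBR_TA_BW" AS z;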