AWS DMS CDC failing to replicate text column from MySQL

We are using AWS DMS to replicate data from MySQL to Aurora MySQL with ongoing replication. We set up task validation to ensure source-to-target replication was successful. One of the tables has a text column that fails validation during ongoing replication. The task replicates the column properly during full load, but during ongoing replication the target column is always an empty string.
The schemas for the source and target are identical.
Below are the task settings for the replication task.
{
  "TargetMetadata": {
    "TargetSchema": "mydb",
    "SupportLobs": true,
    "FullLobMode": true,
    "LobChunkSize": 64,
    "LimitedSizeLobMode": false,
    "LobMaxSize": 0,
    "InlineLobMaxSize": 0,
    "LoadMaxFileSize": 0,
    "ParallelLoadThreads": 0,
    "ParallelLoadBufferSize": 0,
    "BatchApplyEnabled": false,
    "TaskRecoveryTableEnabled": false
  },
  "FullLoadSettings": {
    "TargetTablePrepMode": "DO_NOTHING",
    "CreatePkAfterFullLoad": false,
    "StopTaskCachedChangesApplied": false,
    "StopTaskCachedChangesNotApplied": false,
    "MaxFullLoadSubTasks": 8,
    "TransactionConsistencyTimeout": 600,
    "CommitRate": 10000
  },
  "Logging": {
    "EnableLogging": true,
    "LogComponents": [
      {
        "Id": "SOURCE_UNLOAD",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "SOURCE_CAPTURE",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "TARGET_LOAD",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "TARGET_APPLY",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "TASK_MANAGER",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      }
    ],
    "CloudWatchLogGroup": "dms-tasks-test-dms-replication-instance",
    "CloudWatchLogStream": "dms-task"
  },
  "ControlTablesSettings": {
    "historyTimeslotInMinutes": 5,
    "ControlSchema": "",
    "HistoryTimeslotInMinutes": 5,
    "HistoryTableEnabled": false,
    "SuspendedTablesTableEnabled": false,
    "StatusTableEnabled": false
  },
  "StreamBufferSettings": {
    "StreamBufferCount": 3,
    "StreamBufferSizeInMB": 8,
    "CtrlStreamBufferSizeInMB": 5
  },
  "ChangeProcessingDdlHandlingPolicy": {
    "HandleSourceTableDropped": true,
    "HandleSourceTableTruncated": true,
    "HandleSourceTableAltered": true
  },
  "ErrorBehavior": {
    "DataErrorPolicy": "LOG_ERROR",
    "DataTruncationErrorPolicy": "LOG_ERROR",
    "DataErrorEscalationPolicy": "SUSPEND_TABLE",
    "DataErrorEscalationCount": 50,
    "TableErrorPolicy": "SUSPEND_TABLE",
    "TableErrorEscalationPolicy": "STOP_TASK",
    "TableErrorEscalationCount": 50,
    "RecoverableErrorCount": 0,
    "RecoverableErrorInterval": 5,
    "RecoverableErrorThrottling": true,
    "RecoverableErrorThrottlingMax": 1800,
    "ApplyErrorDeletePolicy": "IGNORE_RECORD",
    "ApplyErrorInsertPolicy": "LOG_ERROR",
    "ApplyErrorUpdatePolicy": "LOG_ERROR",
    "ApplyErrorEscalationPolicy": "LOG_ERROR",
    "ApplyErrorEscalationCount": 0,
    "ApplyErrorFailOnTruncationDdl": false,
    "FullLoadIgnoreConflicts": true,
    "FailOnTransactionConsistencyBreached": false,
    "FailOnNoTablesCaptured": false
  },
  "ChangeProcessingTuning": {
    "BatchApplyPreserveTransaction": true,
    "BatchApplyTimeoutMin": 1,
    "BatchApplyTimeoutMax": 30,
    "BatchApplyMemoryLimit": 500,
    "BatchSplitSize": 0,
    "MinTransactionSize": 1000,
    "CommitTimeout": 1,
    "MemoryLimitTotal": 1024,
    "MemoryKeepTime": 60,
    "StatementCacheSize": 50
  },
  "PostProcessingRules": null,
  "CharacterSetSettings": null,
  "LoopbackPreventionSettings": null
}
I have had trouble finding anything on this. Does anyone have suggestions on how to troubleshoot or fix it?
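One troubleshooting step that stays within these settings: raise the log severity for the CDC components so the CloudWatch stream shows how the text/LOB column is captured and applied. A minimal sketch of the change, assuming DEBUG-level detail is acceptable on a test run (only the Logging block is shown; everything else stays as above):

"Logging": {
  "EnableLogging": true,
  "LogComponents": [
    { "Id": "SOURCE_CAPTURE", "Severity": "LOGGER_SEVERITY_DEBUG" },
    { "Id": "TARGET_APPLY", "Severity": "LOGGER_SEVERITY_DEBUG" }
  ],
  "CloudWatchLogGroup": "dms-tasks-test-dms-replication-instance",
  "CloudWatchLogStream": "dms-task"
}

Since SupportLobs and FullLobMode are already on, the TARGET_APPLY entries around the affected rows should show whether the LOB data arrives in the change stream and is dropped at apply time, or is never captured from the binlog in the first place.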

Related

How to Change the defaults in Great Expectations DataDoc HTML Report?

Great Expectations provides the ability to produce HTML reports using DataDocs, as shown in the following example:
I would like to change the following defaults in the header - see image.
The report is generated using the following
validation_results_stg = ge_stg_update.validate(expectation_suite='stg_expectations.html', only_return_failures=False)
I believe I can make changes because if I run the code validation_results_stg
I get the following output
{
"evaluation_parameters": {},
"meta": {
"great_expectations_version": "0.13.38",
"expectation_suite_name": "default",
"run_id": {
"run_time": "2021-11-18T14:59:49.831733+00:00",
"run_name": null
},
"batch_kwargs": {
"ge_batch_id": "2c157d12-4880-11ec-8b5e-000d3ad66fea"
},
"batch_markers": {},
"batch_parameters": {},
"validation_time": "20211118T145949.831530Z",
"expectation_suite_meta": {
"great_expectations_version": "0.13.38"
}
},
"results": [
{
"result": {
"element_count": 14539,
"missing_count": 0,
"missing_percent": 0.0,
"unexpected_count": 0,
"unexpected_percent": 0.0,
"unexpected_percent_total": 0.0,
"unexpected_percent_nonmissing": 0.0,
"partial_unexpected_list": []
},
"exception_info": {
"raised_exception": false,
"exception_message": null,
"exception_traceback": null
},
"meta": {},
"success": true,
"expectation_config": {
"kwargs": {
"column": "SERVICE",
"value_set": [
"CMC",
"Divorce",
"Probate",
"SSCS"
]
},
"expectation_type": "expect_column_values_to_be_in_set",
"meta": {},
"ge_cloud_id": null
}
},
{
"result": {
"observed_value": 14539
},
"exception_info": {
"raised_exception": false,
"exception_message": null,
"exception_traceback": null
},
"meta": {},
"success": true,
"expectation_config": {
"kwargs": {
"value": 14539
},
"expectation_type": "expect_table_row_count_to_equal",
"meta": {},
"ge_cloud_id": null
}
}
],
"success": true,
"statistics": {
"evaluated_expectations": 2,
"successful_expectations": 2,
"unsuccessful_expectations": 0,
"success_percent": 100.0
}
}
As you can see, the defaults are loaded; e.g., you will notice "expectation_suite_name": "default".
Can someone show me how to change the default to something else?
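A minimal sketch of one way to do this, assuming the 0.13.x DataAsset API where get_expectation_suite() and validate(expectation_suite=...) are available on the batch object (ge_stg_update here is the batch shown in the question):

# Sketch only (assumes the Great Expectations 0.13.x DataAsset API):
# grab the suite currently attached to the batch, rename it, and validate
# against the renamed suite so the report header no longer shows "default".
suite = ge_stg_update.get_expectation_suite()
suite.expectation_suite_name = "stg_expectations"
validation_results_stg = ge_stg_update.validate(
    expectation_suite=suite,
    only_return_failures=False,
)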

How to configure everything into one or multiple configuration files

I haven't used this in two years. I'm trying to figure out whether everything can go into one JSON file or whether it has to be multiple JSON files.
Using v11.9.46. The documentation is not clear, as it references things that apply to one version or another.
I'm moving items from ADOorg1 to ADOorg2. I was going to set up the JSON in this order:
Areas and Iterations
Team setup
Shared Queries
Work Items (using a query to find select area)
This is what I had so far as I tried to figure out how to put it into one:
{
"ChangeSetMappingFile": null,
// "Endpoints": {
// "TfsTeamSettingsEndpoints": [
// {
// "Name": "TeamSettingsSource",
// "AccessToken": "",
// "Query": {
// "Query": "SELECT [System.Id], [System.Tags] FROM WorkItems WHERE [System.TeamProject] = #TeamProject AND [System.WorkItemType] NOT IN ('Test Suite', 'Test Plan') ORDER BY [System.ChangedDate] desc"
// },
// "Organisation": "https://dev.azure.com/test1/",
// "Project": "test1",
// "ReflectedWorkItemIdField": "ReflectedWorkItemId",
// "AuthenticationMode": "AccessToken",
// "AllowCrossProjectLinking": false,
// "LanguageMaps": {
// "AreaPath": "Area",
// "IterationPath": "Iteration"
// }
// },
// {
// "Name": "TeamSettingsTarget",
// "AccessToken": "",
// "Query": {
// "Query": "SELECT [System.Id], [System.Tags] FROM WorkItems WHERE [System.TeamProject] = #TeamProject AND [System.WorkItemType] NOT IN ('Test Suite', 'Test Plan') ORDER BY [System.ChangedDate] desc"
// },
// "Organisation": "https://dev.azure.com/test2/",
// "Project": "test2",
// "ReflectedWorkItemIdField": "ReflectedWorkItemId",
// "AuthenticationMode": "AccessToken",
// "AllowCrossProjectLinking": false,
// "LanguageMaps": {
// "AreaPath": "Area",
// "IterationPath": "Iteration"
// }
// }
// ]
// },
"Source": {
"$type": "TfsTeamProjectConfig",
"Collection": "https://dev.azure.com/test1/",
"Project": "test1",
"ReflectedWorkItemIDFieldName": "ReflectedWorkItemId",
"AllowCrossProjectLinking": false,
"AuthenticationMode": "Prompt",
"PersonalAccessToken": "",
"LanguageMaps": {
"AreaPath": "Area",
"IterationPath": "Iteration"
}
},
"Target": {
"$type": "TfsTeamProjectConfig",
"Collection": "https://dev.azure.com/test2/",
"Project": "Test2",
"ReflectedWorkItemIDFieldName": "ReflectedWorkItemId",
"AllowCrossProjectLinking": false,
"AuthenticationMode": "Prompt",
"PersonalAccessToken": "",
"LanguageMaps": {
"AreaPath": "Area",
"IterationPath": "Iteration"
}
},
"Endpoints": {
"InMemoryWorkItemEndpoints": [
{
"Name": "Source",
"EndpointEnrichers": null
},
{
"Name": "Target",
"EndpointEnrichers": null
}
]
},
"GitRepoMapping": null,
"LogLevel": "Information",
"Processors": [
{
"$type": "TfsAreaAndIterationProcessorOptions",
"Enabled": true,
"PrefixProjectToNodes": false,
"NodeBasePaths": null,
"ProcessorEnrichers": null,
"SourceName": "Source",
"TargetName": "Target"
},
{
"$type": "TfsTeamSettingsProcessorOptions",
"Enabled": false,
"MigrateTeamSettings": true,
"UpdateTeamSettings": true,
"PrefixProjectToNodes": false,
"Teams": null,
"ProcessorEnrichers": null,
"SourceName": "Source",
"TargetName": "Target"
},
{
"$type": "WorkItemMigrationConfig",
"Enabled": false,
"ReplayRevisions": true,
"PrefixProjectToNodes": false,
"UpdateCreatedDate": true,
"UpdateCreatedBy": true,
"BuildFieldTable": false,
"AppendMigrationToolSignatureFooter": false,
"WIQLQueryBit": "AND [Microsoft.VSTS.Common.ClosedDate] = '' AND [System.WorkItemType] NOT IN ('Test Suite', 'Test Plan')",
"WIQLOrderBit": "[System.ChangedDate] desc",
"LinkMigration": true,
"AttachmentMigration": true,
"AttachmentWorkingPath": "E:\\temp\\WorkItemAttachmentWorkingFolder\\",
"FixHtmlAttachmentLinks": false,
"SkipToFinalRevisedWorkItemType": true,
"WorkItemCreateRetryLimit": 5,
"FilterWorkItemsThatAlreadyExistInTarget": true,
"PauseAfterEachWorkItem": false,
"AttachmentMaxSize": 480000000,
"CollapseRevisions": false,
"LinkMigrationSaveEachAsAdded": false,
"GenerateMigrationComment": true,
"NodeBasePaths": [
"Product\\Area\\Path1",
"Product\\Area\\Path2"
],
"WorkItemIDs": null
}
],
"Version": "0.0",
"workaroundForQuerySOAPBugEnabled": false,
"WorkItemTypeDefinition": {
"sourceWorkItemTypeName": "targetWorkItemTypeName"
}
// "Endpoints": {
// "InMemoryWorkItemEndpoints": [
// {
// "Name": "Source",
// "EndpointEnrichers": null
// },
// {
// "Name": "Target",
// "EndpointEnrichers": null
// }
// ]
// }
}
If you use the Azure DevOps Migration Tools, they can migrate Work Items, Test Plans & Suites, Teams, Shared Queries, and Pipelines from one organization to another. You could refer to the processors TfsAreaAndIterationProcessor, TfsTeamSettingsProcessor, TfsSharedQueryProcessor, and the Work Item Tracking Processor to create the corresponding configuration JSON files one by one.
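A single configuration file can also carry several processors in the same "Processors" array, with the "Enabled" flags controlling which ones run on a given pass. A rough sketch of an entry for migrating Shared Queries, reusing the Source/Target already defined above; "TfsSharedQueryProcessorOptions" and its fields are an assumption based on the TfsSharedQueryProcessor mentioned above, so verify the option names against the v11.9.46 documentation:

// Sketch only: append this entry to the existing "Processors" array.
// The type name and options below are assumed, not taken from the docs.
{
  "$type": "TfsSharedQueryProcessorOptions",
  "Enabled": true,
  "SourceName": "Source",
  "TargetName": "Target"
}

With that in place, one file can drive the whole migration in your order (areas and iterations, team settings, shared queries, then work items) by toggling the "Enabled" flags between runs.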

JSON dynamic fields to struct issue [duplicate]

This question already has answers here:
Decoding generic JSON objects to one of many formats
(1 answer)
How to parse a complicated JSON with Go unmarshal?
(3 answers)
golang | unmarshalling arbitrary data
(2 answers)
How to parse JSON in golang without unmarshaling twice
(3 answers)
Is it possible to partially decode and update JSON? (go)
(2 answers)
Closed 10 months ago.
Every time I resolve one issue I run into another that is similar but different.
I need to convert the JSON below into structs, but some parts are dynamic, and I am not very experienced with these conversions and get very confused.
Below I have two fields that are dynamic rather than static (with the help of this forum I was able to resolve a similar issue with one dynamic entry, but now I have two).
I called those fields "This string changes" so it is less confusing.
[null,null,"hub:zWXKROOM","presence_state",{"74ce1906-af89-48a9-aec7-501369509000":{"metas":[{"context":{"embed":false,"mobile":false},"permissions":{"close_hub":false,"embed_hub":false,"fly":true,"join_hub":true,"kick_users":false,"mute_users":false,"pin_objects":false,"spawn_and_move_media":true,"spawn_camera":true,"spawn_drawing":false,"spawn_emoji":true,"update_hub":false,"update_hub_promotion":false,"update_roles":false},"phx_ref":"tGJf9IxredI=","phx_ref_prev":"zJ3pFzeYafM=","presence":"room","profile":{"avatarId":"3IADk9x","displayName":"real changeling"},"roles":{"creator":false,"owner":false,"signed_in":false}}]},"774e91d5-a324-47d7-ba75-edf9ed5bbe1a":{"metas":[{"context":{"embed":false,"mobile":false},"permissions":{"close_hub":false,"embed_hub":false,"fly":true,"join_hub":true,"kick_users":false,"mute_users":false,"pin_objects":false,"spawn_and_move_media":true,"spawn_camera":true,"spawn_drawing":false,"spawn_emoji":true,"update_hub":false,"update_hub_promotion":false,"update_roles":false},"phx_ref":"NnAT0YpIaUg=","phx_ref_prev":"tzv+xV6h0Rs=","presence":"room","profile":{"avatarId":"PcJ8Sxb","displayName":"GoBotWebSockets"},"roles":{"creator":false,"owner":false,"signed_in":false}}]},"9bd22f70-521a-49c2-9cb9-ac58dabfa1d6":{"metas":[{"context":{"embed":false,"mobile":false},"permissions":{"close_hub":false,"embed_hub":false,"fly":true,"join_hub":true,"kick_users":false,"mute_users":false,"pin_objects":false,"spawn_and_move_media":true,"spawn_camera":true,"spawn_drawing":false,"spawn_emoji":true,"update_hub":false,"update_hub_promotion":false,"update_roles":false},"phx_ref":"kPknFXlNkMo=","phx_ref_prev":"IA2Es263VdA=","presence":"room","profile":{"avatarId":"3IADk9x","displayName":"killab33z"},"roles":{"creator":false,"owner":false,"signed_in":false}}]},"f87b718a-c873-40a9-99db-91b4d0f7f4de":{"metas":[{"context":{"embed":false,"mobile":false},"permissions":{"close_hub":true,"embed_hub":true,"fly":true,"join_hub":true,"kick_users":true,"mute_users":true,"pin_objects":true,"spawn_and_move_media":true,"spawn_camera":true,"spawn_drawing":true,"spawn_emoji":true,"update_hub":true,"update_hub_promotion":false,"update_roles":true},"phx_ref":"XCCt44iesAo=","presence":"lobby","profile":{"avatarId":"https://s3.amazonaws.com/readyplayerbaker/avatars_baked/89e86e1a-43c7-4520-8f91-9a94ed42a722.glb","displayName":"ReK2"},"roles":{"creator":true,"owner":true,"signed_in":true}}]}}]
With a lot of help from someone here I was able to convert a very similar one, but with two I can't seem to get it to work. I keep getting empty results.
The similar approach is here.
I have tried replicating it with a field above and other similar things.
I have looked at many blogs for this situation, with no luck.
I have also tried tools for this, but it is not really working because the tool does not know the details.
Edit: here is the output from JSON lint:
[
null,
null,
"hub:zWXKROOM",
"presence_state",
{
"74ce1906-af89-48a9-aec7-501369509000": {
"metas": [
{
"context": {
"embed": false,
"mobile": false
},
"permissions": {
"close_hub": false,
"embed_hub": false,
"fly": true,
"join_hub": true,
"kick_users": false,
"mute_users": false,
"pin_objects": false,
"spawn_and_move_media": true,
"spawn_camera": true,
"spawn_drawing": false,
"spawn_emoji": true,
"update_hub": false,
"update_hub_promotion": false,
"update_roles": false
},
"phx_ref": "tGJf9IxredI=",
"phx_ref_prev": "zJ3pFzeYafM=",
"presence": "room",
"profile": {
"avatarId": "3IADk9x",
"displayName": "real changeling"
},
"roles": {
"creator": false,
"owner": false,
"signed_in": false
}
}
]
},
"774e91d5-a324-47d7-ba75-edf9ed5bbe1a": {
"metas": [
{
"context": {
"embed": false,
"mobile": false
},
"permissions": {
"close_hub": false,
"embed_hub": false,
"fly": true,
"join_hub": true,
"kick_users": false,
"mute_users": false,
"pin_objects": false,
"spawn_and_move_media": true,
"spawn_camera": true,
"spawn_drawing": false,
"spawn_emoji": true,
"update_hub": false,
"update_hub_promotion": false,
"update_roles": false
},
"phx_ref": "NnAT0YpIaUg=",
"phx_ref_prev": "tzv+xV6h0Rs=",
"presence": "room",
"profile": {
"avatarId": "PcJ8Sxb",
"displayName": "GoBotWebSockets"
},
"roles": {
"creator": false,
"owner": false,
"signed_in": false
}
}
]
},
"9bd22f70-521a-49c2-9cb9-ac58dabfa1d6": {
"metas": [
{
"context": {
"embed": false,
"mobile": false
},
"permissions": {
"close_hub": false,
"embed_hub": false,
"fly": true,
"join_hub": true,
"kick_users": false,
"mute_users": false,
"pin_objects": false,
"spawn_and_move_media": true,
"spawn_camera": true,
"spawn_drawing": false,
"spawn_emoji": true,
"update_hub": false,
"update_hub_promotion": false,
"update_roles": false
},
"phx_ref": "kPknFXlNkMo=",
"phx_ref_prev": "IA2Es263VdA=",
"presence": "room",
"profile": {
"avatarId": "3IADk9x",
"displayName": "killab33z"
},
"roles": {
"creator": false,
"owner": false,
"signed_in": false
}
}
]
},
"f87b718a-c873-40a9-99db-91b4d0f7f4de": {
"metas": [
{
"context": {
"embed": false,
"mobile": false
},
"permissions": {
"close_hub": true,
"embed_hub": true,
"fly": true,
"join_hub": true,
"kick_users": true,
"mute_users": true,
"pin_objects": true,
"spawn_and_move_media": true,
"spawn_camera": true,
"spawn_drawing": true,
"spawn_emoji": true,
"update_hub": true,
"update_hub_promotion": false,
"update_roles": true
},
"phx_ref": "XCCt44iesAo=",
"presence": "lobby",
"profile": {
"avatarId": "https://s3.amazonaws.com/readyplayerbaker/avatars_baked/89e86e1a-43c7-4520-8f91-9a94ed42a722.glb",
"displayName": "ReK2"
},
"roles": {
"creator": true,
"owner": true,
"signed_in": true
}
}
]
}
}
]
[edit2]
I have made many attempts, but it still is not printing anything at all!
It is as if there is no data, but there is data... can someone please check my code and see what I am doing wrong? This last attempt I did step by step with a tutorial, and it is still not showing any data; I wish it did so at least I would know what to change.
https://gitlab.com/rek2/gohubsbot/-/blob/master/websocketsListen.go
thanks
OK, after a lot of debugging, reading, hitting my head, and stripping the JSON down to make it simpler to post into JSON-to-Go tools... I finally got it.
The main issue?
// the type needs to be a map itself
type PresenceState map[string]State
Also:
// even though I tried this before, I never called it the right way, so I was giving up and moving on... I have to range over the pointer
for k, o := range *v {}
It is working now. I am getting the keys and objects when there is a presence_state at login or on change, so I can map IDs to usernames.
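For reference, a self-contained sketch of the approach described above, under the assumption that only display name and presence are needed (the struct names here are illustrative, not taken from the linked repository): the message is a JSON array whose elements have different types, so decode it into []json.RawMessage first, then unmarshal the final element into a map keyed by the dynamic IDs.

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// State mirrors one entry of the presence payload; only the fields needed
// for mapping IDs to display names are modelled here.
type State struct {
	Metas []struct {
		Presence string `json:"presence"`
		Profile  struct {
			AvatarID    string `json:"avatarId"`
			DisplayName string `json:"displayName"`
		} `json:"profile"`
	} `json:"metas"`
}

// PresenceState is keyed by the dynamic UUID-like strings.
type PresenceState map[string]State

func main() {
	// Trimmed-down version of the message shown above.
	msg := []byte(`[null,null,"hub:zWXKROOM","presence_state",{"74ce1906-af89-48a9-aec7-501369509000":{"metas":[{"presence":"room","profile":{"avatarId":"3IADk9x","displayName":"real changeling"}}]}}]`)

	// The outer value is a heterogeneous array, so defer decoding of each element.
	var parts []json.RawMessage
	if err := json.Unmarshal(msg, &parts); err != nil {
		log.Fatal(err)
	}

	// The presence map is the last element of the array.
	var presence PresenceState
	if err := json.Unmarshal(parts[len(parts)-1], &presence); err != nil {
		log.Fatal(err)
	}

	for id, state := range presence {
		for _, meta := range state.Metas {
			fmt.Printf("%s -> %s (%s)\n", id, meta.Profile.DisplayName, meta.Presence)
		}
	}
}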

Backstopjs site loading issue not solved by delay: what is wrong?

When I try to run backstopjs on certain sites (they all must have some dynamic rendering thing in common, though I don't know what), the screenshots generated by backstopjs only include the first piece of content, centered in the screen. Here's the URL to a screenshot: https://user-images.githubusercontent.com/41495147/63806833-1612b680-c8e2-11e9-9932-680864b470b7.png
I've already tried setting the delay to 5 seconds. I've tried waiting until the footer class is available before taking the screenshot. No dice. What is going on? Here's my config file:
"id": "backstop_default",
"viewports": [
{
"label": "phone",
"width": 320,
"height": 480
},
{
"label": "tablet",
"width": 1024,
"height": 768
},
{
"label": "desktop",
"width": 1280,
"height": 1024
}
],
"onBeforeScript": "puppet/onBefore.js",
"onReadyScript": "puppet/onReady.js",
"scenarios": [
{
"label": "VMLYR Home",
"cookiePath": "backstop_data/engine_scripts/cookies.json",
"url": "https://www.vmlyr.com",
"referenceUrl": "",
"readyEvent": "",
"readySelector": ".region-footer",
"delay": 5000,
"hideSelectors": [],
"removeSelectors": [],
"hoverSelector": "",
"clickSelector": "",
"postInteractionWait": 0,
"selectors": [],
"selectorExpansion": true,
"expect": 0,
"misMatchThreshold" : 0.1,
"requireSameDimensions": true
}
],
"paths": {
"bitmaps_reference": "backstop_data/bitmaps_reference",
"bitmaps_test": "backstop_data/bitmaps_test",
"engine_scripts": "backstop_data/engine_scripts",
"html_report": "backstop_data/html_report",
"ci_report": "backstop_data/ci_report"
},
"report": ["browser"],
"engine": "puppeteer",
"engineOptions": {
"args": ["--no-sandbox"]
},
"asyncCaptureLimit": 5,
"asyncCompareLimit": 50,
"debug": false,
"debugWindow": false
}```
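One thing worth trying, since the symptoms look like content below the fold is lazy-rendered and never triggers: scroll the page before capture. BackstopJS scenarios support a scrollToSelector property (handled by the default onReady script) that scrolls an element into view before the screenshot; whether that is enough to trigger this particular site's rendering is an assumption to verify. A minimal sketch of the scenario with the extra property:

{
  "label": "VMLYR Home",
  "url": "https://www.vmlyr.com",
  "readySelector": ".region-footer",
  "scrollToSelector": ".region-footer",
  "delay": 5000,
  "misMatchThreshold": 0.1,
  "requireSameDimensions": true
}

If scrolling to a single selector is not enough, a custom onReady.js that steps through the whole page is the usual next thing to try.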

Deserializing JSON with different parent node names

I'm trying to deserialize a JSON response from Gitblit using C#, and I'm getting it back in the following format:
{
"https://localhost/git/libraries/xmlapache.git": {
"name": "libraries/xmlapache.git",
"description": "apache xmlrpc client and server",
"owner": "admin",
"lastChange": "2010-01-28T22:12:06Z",
"hasCommits": true,
"showRemoteBranches": false,
"useTickets": false,
"useDocs": false,
"accessRestriction": "VIEW",
"isFrozen": false,
"showReadme": false,
"federationStrategy": "FEDERATE_THIS",
"federationSets": [
"libraries"
],
"isFederated": false,
"skipSizeCalculation": false,
"skipSummaryMetrics": false,
"size": "102 KB"
},
"https://localhost/git/libraries/smack.git": {
"name": "libraries/smack.git",
"description": "smack xmpp client",
"owner": "admin",
"lastChange": "2009-01-28T18:38:14Z",
"hasCommits": true,
"showRemoteBranches": false,
"useTickets": false,
"useDocs": false,
"accessRestriction": "VIEW",
"isFrozen": false,
"showReadme": false,
"federationStrategy": "FEDERATE_THIS",
"federationSets": [],
"isFederated": false,
"skipSizeCalculation": false,
"skipSummaryMetrics": false,
"size": "4.8 MB"
}
}
I'm having trouble creating the DataContract because the parent nodes are named from the pull URL and could be anything rather than a standardised name. I only need to deserialize "name", "description", "owner", and "lastChange" from underneath them.
So is there a way I can deserialize each dataset while ignoring the parent node altogether?
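A minimal sketch of one common approach, assuming Json.NET (Newtonsoft.Json) is acceptable instead of a DataContract: because the varying parent names are just object keys, deserialize into a Dictionary<string, Repository>, where Repository is a hypothetical class that declares only the fields needed.

using System;
using System.Collections.Generic;
using Newtonsoft.Json;

// Hypothetical class: only the properties we care about are declared;
// Json.NET ignores the remaining fields of each repository object.
public class Repository
{
    [JsonProperty("name")]
    public string Name { get; set; }

    [JsonProperty("description")]
    public string Description { get; set; }

    [JsonProperty("owner")]
    public string Owner { get; set; }

    [JsonProperty("lastChange")]
    public DateTime LastChange { get; set; }
}

public static class GitblitExample
{
    public static void Main()
    {
        // Replace with the Gitblit response shown above.
        string json = "{}";

        // The repository URLs become dictionary keys, so their names don't matter.
        var repos = JsonConvert.DeserializeObject<Dictionary<string, Repository>>(json);

        foreach (var pair in repos)
        {
            Console.WriteLine($"{pair.Value.Name} owned by {pair.Value.Owner}, last change {pair.Value.LastChange}");
        }
    }
}

If staying with DataContractJsonSerializer is a requirement, its DataContractJsonSerializerSettings.UseSimpleDictionaryFormat option (.NET 4.5+) can deserialize the same dictionary shape, but the ISO date strings in "lastChange" need extra handling there.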