Pimcore REST API: Create object with block - pimcore

I'm trying to create a new object via the pimcore rest api.
The object that I'm trying to create contains blocks. The object is created, but the content that should appear in the block is not created.
{
"className": "TrainingCourseMaterial",
"parentId": -1,
"key": "Slides",
"published": true,
"data": {
"elements": [{
"type": "block",
"value": [{
"contentlanguage": "de",
"slides": [{
"image__image": 1859,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1860,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1861,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1862,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1863,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1864,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1865,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1866,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1867,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1868,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
},
{
"image__image": 1869,
"image__hotspots": {
"hotspots": [],
"marker": [],
"crop": []
}
}
]
}
],
"name": "SlidesBlock",
"language": null
}]
}
}
Not sure if this is even possible. Last resort would be to use the admin api.

Related

Creating Custom Account for Stripe

I tried creating a Stripe Custom account in test mode.
Request
const createAccountTeacher = catchAsync(async (req, res) =>
{
const date = new Date();
let time = Math.floor(date.getTime() / 1000);
const email = req.body.email;
const country = req.body.country;
const createAccount = await stripe.accounts.create({
type: 'custom',
country: country,
business_type: 'individual',
tos_acceptance: {
date: time,
ip: '8.8.8.8',
},
business_profile: {
mcc: '5734',
url: 'http://google.com',
product_description: 'Good Product',
support_phone: '12345567',
product_description: 'Teaching Courses available',
support_phone: '+10000000000',
},
individual: {
first_name: 'ABC',
last_name: 'XYZ',
dob: {
day: 24,
month: 6,
year: 1992,
},
address: {
line1: '1996 W Highland Ave',
postal_code: 90002,
city: ' San Bernardino',
state: 'California',
},
email: email,
phone: '+1202-555-0454',
ssn_last_4: 9999,
id_number: 123459999,
},
external_account: {
object: 'bank_account',
country: 'US',
currency: 'usd',
account_number: '000123456789',
routing_number: 121000358,
},
capabilities: {
card_payments: { requested: true },
transfers: { requested: true },
},
});
if (createAccount) {
res.status(200).json({ data: createAccount });
}
});
Here as response
{
"data":
{
"details_submitted": true,
"type": "custom",
"metadata": {},
"id": "acct_1LNcOTD4Ev4rC234",
"object": "account",
"default_currency": "usd",
"capabilities": {
"transfers": "pending",
"card_payments": "pending"
},
"business_type": "individual",
"individual": {
"metadata": {},
"id": "person_1LNcOVD4Ev4rC234OQjvuHiP",
"object": "person",
"account": "acct_1LNcOTD4Ev4rC234",
"dob": {
"year": 1992,
"day": 24,
"month": 6
},
"requirements": {
"currently_due": [],
"past_due": [],
"eventually_due": [],
"pending_verification": [
"id_number",
"verification.document"
],
"errors": [],
"alternatives": []
},
"ssn_last_4_provided": true,
"phone": "+12025550454",
"relationship": {
"percent_ownership": null,
"title": null,
"owner": false,
"director": false,
"representative": true,
"executive": false
},
"future_requirements": {
"currently_due": [],
"past_due": [],
"eventually_due": [],
"pending_verification": [],
"errors": [],
"alternatives": []
},
"verification": {
"status": "pending",
"details": null,
"document": {
"details_code": null,
"front": null,
"details": null,
"back": null
},
"additional_document": {
"details_code": null,
"front": null,
"details": null,
"back": null
},
"details_code": null
},
"address": {
"line2": null,
"line1": "1996 W Highland Ave",
"state": "California",
"postal_code": "90002",
"city": " San Bernardino",
"country": "US"
},
"email": "lilypota#ema-sofia.eu",
"created": 1658321573,
"first_name": "ABC",
"id_number_provided": true,
"last_name": "XYZ"
},
"charges_enabled": false,
"settings": {
"dashboard": {
"display_name": "Google",
"timezone": "Etc/UTC"
},
"payouts": {
"debit_negative_balances": false,
"statement_descriptor": null,
"schedule": {
"interval": "daily",
"delay_days": 2
}
},
"card_issuing": {
"tos_acceptance": {
"ip": null,
"date": null
}
},
"bacs_debit_payments": {},
"payments": {
"statement_descriptor_kanji": null,
"statement_descriptor_kana": null,
"statement_descriptor": "GOOGLE.COM"
},
"sepa_debit_payments": {},
"card_payments": {
"statement_descriptor_prefix_kanji": null,
"statement_descriptor_prefix": null,
"statement_descriptor_prefix_kana": null,
"decline_on": {
"avs_failure": false,
"cvc_failure": false
}
},
"branding": {
"icon": null,
"secondary_color": null,
"logo": null,
"primary_color": null
}
},
"tos_acceptance": {
"ip": "8.8.8.8",
"user_agent": null,
"date": 1658321567
},
"requirements": {
"current_deadline": null,
"past_due": [],
"errors": [],
"disabled_reason": "requirements.pending_verification",
"pending_verification": [
"individual.id_number",
"individual.verification.document"
],
"currently_due": [],
"eventually_due": [],
"alternatives": []
},
"payouts_enabled": false,
"company": {
"tax_id_provided": false,
"phone": "+12025550454",
"owners_provided": true,
"verification": {
"document": {
"details_code": null,
"front": null,
"details": null,
"back": null
}
},
"address": {
"line2": null,
"line1": "1996 W Highland Ave",
"state": "California",
"postal_code": "90002",
"city": " San Bernardino",
"country": "US"
},
"executives_provided": true,
"directors_provided": true,
"name": null
},
"external_accounts": {
"has_more": false,
"total_count": 1,
"object": "list",
"url": "/v1/accounts/acct_1LNcOTD4Ev4rC234/external_accounts",
"data": [
{
"last4": "6789",
"account_holder_name": null,
"metadata": {},
"id": "ba_1LNcOUD4Ev4rC234XwzzfiqR",
"object": "bank_account",
"account_type": null,
"default_for_currency": true,
"account_holder_type": null,
"account": "acct_1LNcOTD4Ev4rC234",
"status": "new",
"available_payout_methods": [
"standard"
],
"bank_name": "BANK OF AMERICA, N.A.",
"currency": "usd",
"country": "US",
"routing_number": "121000358",
"fingerprint": "gqPBt6FUMZJkqc9q"
}
]
},
"future_requirements": {
"current_deadline": null,
"past_due": [],
"errors": [],
"disabled_reason": null,
"pending_verification": [],
"currently_due": [],
"eventually_due": [],
"alternatives": []
},
"country": "US",
"email": null,
"created": 1658321576,
"business_profile": {
"support_email": null,
"product_description": "Teaching Courses available",
"mcc": "5734",
"support_url": null,
"support_address": null,
"url": "http://google.com",
"support_phone": "+10000000000",
"name": null
}
}
}
The Custom Account is created, but the problem is that it is restricted because of the identity document.
I am trying to upload the document like this
const updateAccount = catchAsync(async (req, res) => {
// let imagepath = ${req.protocol}://${req.get('host')}/uploads/${req.file.filename};
if(req.file.path){
const file = await stripe.files.create({
purpose: 'identity_document',
file: {
data: fs.readFileSync(req.file.path),
name: req.file.filename,
type: 'application/octet-stream',
},
}, {
stripeAccount: 'acct_1LNcOTD4Ev4rC234',
});
if(file){
res.status(200).json({data:file})
}
}
})
Still the custom account is restricted.
I would appreciate a little help.

How to terminate process after certain duration in Netflix Conductor

I am trying to fork the process after it starts into parallel executions in Netflix Conductor. Some form an actual flow and the last one should be a timer. So if the execution does not finish within the timer expiration time, e.g. 3 days, it will be terminated. I have defined the following workflow definition, but the WAIT task with a duration seems to have no effect.
{
"updateTime": 1658740176408,
"name": "test_definiton",
"description": "Edit or extend this sample workflow. Set the workflow name to get started",
"version": 1,
"tasks": [
{
"name": "fork_join",
"taskReferenceName": "my_fork_join_ref",
"inputParameters": {},
"type": "FORK_JOIN",
"decisionCases": {},
"defaultCase": [],
"forkTasks": [
[
{
"name": "process_notification_payload",
"taskReferenceName": "process_notification_payload_email",
"inputParameters": {},
"type": "SIMPLE",
"decisionCases": {},
"defaultCase": [],
"forkTasks": [],
"startDelay": 0,
"joinOn": [],
"optional": false,
"defaultExclusiveJoinTask": [],
"asyncComplete": false,
"loopOver": []
},
{
"name": "email_notification",
"taskReferenceName": "email_notification_ref",
"inputParameters": {},
"type": "SIMPLE",
"decisionCases": {},
"defaultCase": [],
"forkTasks": [],
"startDelay": 0,
"joinOn": [],
"optional": false,
"defaultExclusiveJoinTask": [],
"asyncComplete": false,
"loopOver": []
}
],
[
{
"name": "process_notification_payload",
"taskReferenceName": "process_notification_payload_sms",
"inputParameters": {},
"type": "SIMPLE",
"decisionCases": {},
"defaultCase": [],
"forkTasks": [],
"startDelay": 0,
"joinOn": [],
"optional": false,
"defaultExclusiveJoinTask": [],
"asyncComplete": false,
"loopOver": []
},
{
"name": "sms_notification",
"taskReferenceName": "sms_notification_ref",
"inputParameters": {},
"type": "SIMPLE",
"decisionCases": {},
"defaultCase": [],
"forkTasks": [],
"startDelay": 0,
"joinOn": [],
"optional": false,
"defaultExclusiveJoinTask": [],
"asyncComplete": false,
"loopOver": []
}
],
[
{
"name": "wait_task",
"taskReferenceName": "wait_task_ref",
"inputParameters": {
"duration": "3 days"
},
"type": "WAIT",
"decisionCases": {},
"defaultCase": [],
"forkTasks": [],
"startDelay": 0,
"joinOn": [],
"optional": false,
"defaultExclusiveJoinTask": [],
"asyncComplete": false,
"loopOver": []
},
{
"name": "terminate",
"taskReferenceName": "terminate0",
"inputParameters": {
"terminationStatus": "FAILED"
},
"type": "TERMINATE",
"decisionCases": {},
"defaultCase": [],
"forkTasks": [],
"startDelay": 0,
"joinOn": [],
"optional": false,
"defaultExclusiveJoinTask": [],
"asyncComplete": false,
"loopOver": []
}
]
],
"startDelay": 0,
"joinOn": [],
"optional": false,
"defaultExclusiveJoinTask": [],
"asyncComplete": false,
"loopOver": []
},
{
"name": "notification_join",
"taskReferenceName": "notification_join_ref",
"inputParameters": {},
"type": "JOIN",
"decisionCases": {},
"defaultCase": [],
"forkTasks": [],
"startDelay": 0,
"joinOn": [
"email_notification_ref",
"sms_notification_ref"
],
"optional": false,
"defaultExclusiveJoinTask": [],
"asyncComplete": false,
"loopOver": []
},
{
"name": "finish",
"taskReferenceName": "finish_ref",
"inputParameters": {
"terminationStatus": "COMPLETED"
},
"type": "TERMINATE",
"decisionCases": {},
"defaultCase": [],
"forkTasks": [],
"startDelay": 0,
"joinOn": [],
"optional": false,
"defaultExclusiveJoinTask": [],
"asyncComplete": false,
"loopOver": []
}
],
"inputParameters": [],
"outputParameters": {
"data": "${get_population_data.output.response.body.data}",
"source": "${get_population_data.output.response.body.source}"
},
"schemaVersion": 2,
"restartable": true,
"workflowStatusListenerEnabled": false,
"ownerEmail": "example#email.com",
"timeoutPolicy": "ALERT_ONLY",
"timeoutSeconds": 0,
"variables": {},
"inputTemplate": {}
}
Can you help me determine whether I made a mistake, or suggest an alternative? Perhaps with an option to call a failure sub-workflow.
Wait task eventually gets finished, though not exactly as it should. Setting duration for 1 minute resulted in a waiting time of 33 minutes.
This can be configured, using e.g. an external SQS, for example, Amazon SQS. More information can be found in the official documentation https://conductor.netflix.com/reference-docs/event-task.html

convert few fields of a nested json to a dictionary in Pyspark

I have a huge nested json as below
"evaluation_parameters": {},
"meta": {
"active_batch_definition": {
"batch_identifiers": {
"pipeline_stage": "prod",
"run_id": "run_20220224"
},
"data_asset_name": "STORES_DQ_SUITE",
"data_connector_name": "stores_connector",
"datasource_name": "stores"
},
"batch_markers": {
"ge_load_time": "20220224T054318.272571Z"
},
"batch_spec": {
"batch_data": "SparkDataFrame",
"data_asset_name": "STORES_DQ_SUITE"
},
"expectation_suite_name": "STORES_DQ_SUITE",
"great_expectations_version": "0.14.7",
"run_id": {
"run_name": "stores_template_20220224-054316",
"run_time": "2022-02-24T05:43:16.678220+00:00"
},
"validation_time": "20220224T054318.389119Z"
},
"results": [
{
"exception_info": {
"exception_message": null,
"exception_traceback": null,
"raised_exception": false
},
"expectation_config": {
"expectation_type": "expect_column_to_exist",
"kwargs": {
"batch_id": "46f2769bf8c7729a40efddfa0597de22",
"column": "country"
},
"meta": {}
},
"meta": {},
"result": {},
"success": true
},
{
"exception_info": {
"exception_message": null,
"exception_traceback": null,
"raised_exception": false
},
"expectation_config": {
"expectation_type": "expect_column_values_to_not_be_null",
"kwargs": {
"batch_id": "46f2769bf8c7729a40efddfa0597de22",
"column": "country"
},
"meta": {}
},
"meta": {},
"result": {
"element_count": 102,
"partial_unexpected_counts": [],
"partial_unexpected_index_list": null,
"partial_unexpected_list": [],
"unexpected_count": 0,
"unexpected_percent": 0.0
},
"success": true
},
{
"exception_info": {
"exception_message": null,
"exception_traceback": null,
"raised_exception": false
},
"expectation_config": {
"expectation_type": "expect_column_values_to_be_of_type",
"kwargs": {
"batch_id": "46f2769bf8c7729a40efddfa0597de22",
"column": "country",
"type_": "StringType"
},
"meta": {}
},
"meta": {},
"result": {
"observed_value": "StringType"
},
"success": true
},
{
"exception_info": {
"exception_message": null,
"exception_traceback": null,
"raised_exception": false
},
"expectation_config": {
"expectation_type": "expect_column_to_exist",
"kwargs": {
"batch_id": "46f2769bf8c7729a40efddfa0597de22",
"column": "countray"
},
"meta": {}
},
"meta": {},
"result": {},
"success": false
},
{
"exception_info": {
"exception_message": null,
"exception_traceback": null,
"raised_exception": false
},
"expectation_config": {
"expectation_type": "expect_table_row_count_to_equal",
"kwargs": {
"batch_id": "46f2769bf8c7729a40efddfa0597de22",
"value": 10
},
"meta": {}
},
"meta": {},
"result": {
"observed_value": 102
},
"success": false
},
{
"exception_info": {
"exception_message": null,
"exception_traceback": null,
"raised_exception": false
},
"expectation_config": {
"expectation_type": "expect_column_sum_to_be_between",
"kwargs": {
"batch_id": "46f2769bf8c7729a40efddfa0597de22",
"column": "active_stores",
"max_value": 1000,
"min_value": 100
},
"meta": {}
},
"meta": {},
"result": {
"observed_value": 22075.0
},
"success": false
}
],
"statistics": {
"evaluated_expectations": 6,
"success_percent": 50.0,
"successful_expectations": 3,
"unsuccessful_expectations": 3
},
"success": false
}
I wanted to derive a table with values with the below lineage -
data_source : hardcode value
run_time : meta.run_id.run_time
expectation_type : results.expectation_config.expectation_type
expectations : results.expectation_config.kwargs (all values except batch_id in a dictionary)
results : results.result (everything as a dictionary)
Expected Result
+-------------------+--------------------------------+------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+---------------+
|data_source |run_time |expectation_type |expectations |results |success |
+-------------------+--------------------------------+------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+---------------+
|hardcoded_value |2022-02-24T05:43:16.678220+00:00|expect_column_to_exist |{"column": "country"} |{} |true |
|hardcoded_value |2022-02-24T05:43:16.678220+00:00|expect_column_values_to_not_be_null |{"column": "country"} |{"element_count": 102, "partial_unexpected_counts": [], "partial_unexpected_index_list": null, "partial_unexpected_list": [], "unexpected_count": 0, "unexpected_percent": 0.0} |true |
|hardcoded_value |2022-02-24T05:43:16.678220+00:00|expect_column_values_to_be_of_type |{"column": "country","type_": "StringType"} |{"observed_value": "StringType"} |true |
|hardcoded_value |2022-02-24T05:43:16.678220+00:00|expect_column_to_exist |{"column": "countray"} |{} |false |
|hardcoded_value |2022-02-24T05:43:16.678220+00:00|expect_table_row_count_to_equal |{"value": 10} |{"observed_value": 102} |false |
|hardcoded_value |2022-02-24T05:43:16.678220+00:00|expect_column_sum_to_be_between |{"column": "active_stores","max_value": 1000,"min_value": 100} |{"observed_value": 22075.0} |false |
+-------------------+--------------------------------+------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+---------------+
Can someone please help me with this.
Thank you in advance.
Convert the json into dataframe using the spark.read.json function.
After that, it gives you df with parent keys as separate columns.
After that you need to explode the results column using explode function of spark.sql.functions. For more details, read this
Then just select the fields that you need from the exploded column.
from pyspark.sql.functions import explode
df = spark.read.json(json_path)
df = df.select(df.meta.run_id.run_time, df.results)
df = df.withColumn("exploded_results", explode(df.results))
df = df.select(df.meta.run_id.run_time, df.exploded_results.expectation_config.expectation_type, df.exploded_results.expectation_config.kwargs, df.exploded_results.result, df.exploded_results.success)

How should one set a custom agent pool in a DevOps release definition?

I create release definitions using DevOps REST APIs. Due to lack of documentation I used to capture HTTP requests and examine JSON payload.
I'm able to set a release using Azure agent pools. It follows only the relevant node:
"deploymentInput": {
"parallelExecution": {
"parallelExecutionType": 0
},
"agentSpecification": {
"identifier": "windows-2019"
},
"skipArtifactsDownload": false,
"artifactsDownloadInput": {},
"queueId": 749,
"demands": [],
"enableAccessToken": false,
"timeoutInMinutes": 0,
"jobCancelTimeoutInMinutes": 1,
"condition": "succeeded()",
"overrideInputs": {},
"dependencies": []
}
I want to set a custom defined agent pool, but even if I capture the request I still can't understand how to set it. This is the full JSON of an empty release with a custom agent set:
{
"id": 0,
"name": "New release pipeline",
"source": 2,
"comment": "",
"createdOn": "2020-10-31T10:02:19.034Z",
"createdBy": null,
"modifiedBy": null,
"modifiedOn": "2020-10-31T10:02:19.034Z",
"environments": [
{
"id": -1,
"name": "Stage 1",
"rank": 1,
"variables": {},
"variableGroups": [],
"preDeployApprovals": {
"approvals": [
{
"rank": 1,
"isAutomated": true,
"isNotificationOn": false,
"id": 0
}
],
"approvalOptions": {
"executionOrder": 1
}
},
"deployStep": {
"tasks": [],
"id": 0
},
"postDeployApprovals": {
"approvals": [
{
"rank": 1,
"isAutomated": true,
"isNotificationOn": false,
"id": 0
}
],
"approvalOptions": {
"executionOrder": 2
}
},
"deployPhases": [
{
"deploymentInput": {
"parallelExecution": {
"parallelExecutionType": 0
},
"agentSpecification": null,
"skipArtifactsDownload": false,
"artifactsDownloadInput": {},
"queueId": 1039,
"demands": [],
"enableAccessToken": false,
"timeoutInMinutes": 0,
"jobCancelTimeoutInMinutes": 1,
"condition": "succeeded()",
"overrideInputs": {},
"dependencies": []
},
"rank": 1,
"phaseType": 1,
"name": "Agent job",
"refName": null,
"workflowTasks": [],
"phaseInputs": {
"phaseinput_artifactdownloadinput": {
"artifactsDownloadInput": {},
"skipArtifactsDownload": false
}
}
}
],
"runOptions": {},
"environmentOptions": {
"emailNotificationType": "OnlyOnFailure",
"emailRecipients": "release.environment.owner;release.creator",
"skipArtifactsDownload": false,
"timeoutInMinutes": 0,
"enableAccessToken": false,
"publishDeploymentStatus": true,
"badgeEnabled": false,
"autoLinkWorkItems": false,
"pullRequestDeploymentEnabled": false
},
"demands": [],
"conditions": [
{
"conditionType": 1,
"name": "ReleaseStarted",
"value": ""
}
],
"executionPolicy": {
"concurrencyCount": 1,
"queueDepthCount": 0
},
"schedules": [],
"properties": {
"LinkBoardsWorkItems": false,
"BoardsEnvironmentType": "unmapped"
},
"preDeploymentGates": {
"id": 0,
"gatesOptions": null,
"gates": []
},
"postDeploymentGates": {
"id": 0,
"gatesOptions": null,
"gates": []
},
"environmentTriggers": [],
"owner": {
"displayName": "Giacomo Stelluti Scala",
"id": "3617734a-1751-66f2-8343-c71c1398b5e6",
"isAadIdentity": true,
"isContainer": false,
"uniqueName": "giacomo.stelluti#dev4side.com",
"url": "https://dev.azure.com/dev4side/"
},
"retentionPolicy": {
"daysToKeep": 30,
"releasesToKeep": 3,
"retainBuild": true
},
"processParameters": {}
}
],
"artifacts": [],
"variables": {},
"variableGroups": [],
"triggers": [],
"lastRelease": null,
"tags": [],
"path": "\\test-poc",
"properties": {
"DefinitionCreationSource": "ReleaseNew",
"IntegrateJiraWorkItems": "false",
"IntegrateBoardsWorkItems": false
},
"releaseNameFormat": "Release-$(rev:r)",
"description": ""
}
Where is this agent set? Does anyone know how to do it properly?
Any help really appreciated.
Giacomo S. S.
I've found the solution in this question.
"deploymentInput": {
"parallelExecution": {
"parallelExecutionType": 0
},
"agentSpecification": null,
"skipArtifactsDownload": false,
"artifactsDownloadInput": {},
"queueId": 1039,
"demands": [],
"enableAccessToken": false,
"timeoutInMinutes": 0,
"jobCancelTimeoutInMinutes": 1,
"condition": "succeeded()",
"overrideInputs": {},
"dependencies": []
}
agentSpecification must be null and queueId must be set.

Adding weather widget powerline

I'm trying to enable the weather widget for powerline, but with no success. I added this code
{
"name": "weather",
"priority": 50,
"args": {
"unit": "F",
"location_query": "oslo, norway"
}
}
at the end of my theme file. When I start MacVim I get an error
Error detected while processing VimEnter Auto commands for "*":
2014-01-30 14:13:11,122:ERROR:vim:segment_generator:Failed to generate segment from {u'priority': 50, u'args': {u'location_query': u'oslo, norway', u'u
nit': u'F'}, u'name': u'weather'}: 'module' object has no attribute 'weather'
I've read powerline documentation but I'm still confused. Any help will be appreciated.
Here is how I configured weather for the Shell. I don't think it is possible with Vim, as it is not listed in the available segments:
https://powerline.readthedocs.org/en/latest/configuration/segments/vim.html
Create the following folder tree:
~/.config/powerline/config.json
~/.config/powerline/themes/shell/netsamir.json
~/.config/powerline/colorschemes/shell/netsamir.json
~/.config/powerline/config.json
{
"common": {
"term_truecolor": false
},
"ext": {
"shell": {
"theme": "netsamir",
"colorscheme": "netsamir"
},
}
}
~/.config/powerline/themes/shell/netsamir.json
{
"segments": {
"above": [
{
"left": [
{
"function": "powerline.segments.common.wthr.weather",
"args": {
"unit": "C"
}
},
{
"name": "user",
"function": "powerline.segments.common.env.user",
"priority": 30
},
{
"function": "powerline.segments.common.vcs.branch",
"args": {
"status_colors": true,
"ignore_statuses": ["U"]
}
},
{
"name": "cwd",
"function": "powerline.segments.common.env.cwd"
}
]
}
],
"left": [
{
"type": "string",
"contents": ">",
"highlight_groups": ["promptline"],
"draw_soft_divider": false
},
{
"type": "string",
"contents": "",
"highlight_groups": ["blank"],
"draw_hard_divider": false
}
]
}
}
~/.config/powerline/colorschemes/shell/netsamir.json
{
"name": "netsamir",
"groups": {
"information:additional": { "fg": "gray9", "bg": "gray4", "attrs": [] },
"information:regular": { "fg": "gray10", "bg": "gray4", "attrs": [] },
"information:highlighted": { "fg": "white", "bg": "gray4", "attrs": ["bold"] },
"information:priority": { "fg": "brightyellow", "bg": "mediumorange", "attrs": [] },
"hostname": { "fg": "black", "bg": "gray10", "attrs": ["bold"] },
"background:divider": "information:additional",
"weather": { "fg": "gray9", "bg": "gray2", "attrs": [] },
"user": { "fg": "brightcyan", "bg": "darkestblue", "attrs": ["bold"] },
"branch": { "fg": "gray9", "bg": "gray2", "attrs": [] },
"branch_dirty": { "fg": "black", "bg": "orangered", "attrs": [] },
"branch_clean": { "fg": "gray9", "bg": "gray2", "attrs": [] },
"branch:divider": { "fg": "gray7", "bg": "gray2", "attrs": [] },
"cwd": "information:additional",
"cwd:current_folder": "information:highlighted",
"cwd:divider": { "fg": "gray7", "bg": "gray4", "attrs": [] },
"promptline": { "fg": "white", "bg": "darkestblue", "attrs": ["bold"] },
"blank": { "fg": "black", "bg": "black", "attrs": [] }
}
}

Resources