Merge remote-tracking branch 'origin/main' into plugin-hostcatalogs

pull/1529/head
Jeff Mitchell 5 years ago
commit a71cdae204

134
.circleci/config.yml generated

@ -29,49 +29,49 @@ jobs:
command: mkdir -p .buildcache && echo "*" > .buildcache/.gitignore
name: Ignore .buildcache
- restore_cache:
key: package-71fa601616a225899af09c8369ed9fc2d6d998e2-{{checksum ".buildcache/cache-keys/package-71fa601616a225899af09c8369ed9fc2d6d998e2"}}
key: package-ddd70200153c0e65302afe88a629b4440e8eebfa-{{checksum ".buildcache/cache-keys/package-ddd70200153c0e65302afe88a629b4440e8eebfa"}}
name: Restore package cache
- restore_cache:
key: package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f-{{checksum ".buildcache/cache-keys/package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f"}}
key: package-dca0ba02c8e7351c77442c86523f121a8c83039e-{{checksum ".buildcache/cache-keys/package-dca0ba02c8e7351c77442c86523f121a8c83039e"}}
name: Restore package cache
- restore_cache:
key: package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4-{{checksum ".buildcache/cache-keys/package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4"}}
key: package-72b242aa7004a2a706bd894df71c002671e8e425-{{checksum ".buildcache/cache-keys/package-72b242aa7004a2a706bd894df71c002671e8e425"}}
name: Restore package cache
- restore_cache:
key: package-c8401fcf82b86f62e39516bc847423296f351dcf-{{checksum ".buildcache/cache-keys/package-c8401fcf82b86f62e39516bc847423296f351dcf"}}
key: package-2409a41c5c5a434b5dbf77d29deb31a24c825dea-{{checksum ".buildcache/cache-keys/package-2409a41c5c5a434b5dbf77d29deb31a24c825dea"}}
name: Restore package cache
- restore_cache:
key: package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb-{{checksum ".buildcache/cache-keys/package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb"}}
key: package-1439f62f659d115a6675bef8de63d7a576370da6-{{checksum ".buildcache/cache-keys/package-1439f62f659d115a6675bef8de63d7a576370da6"}}
name: Restore package cache
- restore_cache:
key: package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525-{{checksum ".buildcache/cache-keys/package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525"}}
key: package-192814968c68e1c819d216b95925afb4099aaa07-{{checksum ".buildcache/cache-keys/package-192814968c68e1c819d216b95925afb4099aaa07"}}
name: Restore package cache
- restore_cache:
key: package-1efe1513c2b57d32392ed4a4601de1699c813457-{{checksum ".buildcache/cache-keys/package-1efe1513c2b57d32392ed4a4601de1699c813457"}}
key: package-b734ede0c13f22ffc1835626300d3b3226acaf53-{{checksum ".buildcache/cache-keys/package-b734ede0c13f22ffc1835626300d3b3226acaf53"}}
name: Restore package cache
- restore_cache:
key: package-384724ac89d695a73abb78aad60acd3c7c2c4270-{{checksum ".buildcache/cache-keys/package-384724ac89d695a73abb78aad60acd3c7c2c4270"}}
key: package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa-{{checksum ".buildcache/cache-keys/package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa"}}
name: Restore package cache
- restore_cache:
key: package-45fe69b6571284299223e8245f29c40ffd9c43dd-{{checksum ".buildcache/cache-keys/package-45fe69b6571284299223e8245f29c40ffd9c43dd"}}
key: package-b9d9cc299dc96f89f71285c520b4cd244879207a-{{checksum ".buildcache/cache-keys/package-b9d9cc299dc96f89f71285c520b4cd244879207a"}}
name: Restore package cache
- restore_cache:
key: package-e5c674f011608ff0ec51285ea342fe7a95f803e5-{{checksum ".buildcache/cache-keys/package-e5c674f011608ff0ec51285ea342fe7a95f803e5"}}
key: package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d-{{checksum ".buildcache/cache-keys/package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d"}}
name: Restore package cache
- restore_cache:
key: package-291ec533f804015a31ec89443c1100a2ae526856-{{checksum ".buildcache/cache-keys/package-291ec533f804015a31ec89443c1100a2ae526856"}}
key: package-489303d43b63583b456fef92eeef9806a25ef98d-{{checksum ".buildcache/cache-keys/package-489303d43b63583b456fef92eeef9806a25ef98d"}}
name: Restore package cache
- restore_cache:
key: package-d2dbe866712854d51e105c9b8b33ff5e22d1287c-{{checksum ".buildcache/cache-keys/package-d2dbe866712854d51e105c9b8b33ff5e22d1287c"}}
key: package-394f426443d3ce6446a86a2982253a2637c061e0-{{checksum ".buildcache/cache-keys/package-394f426443d3ce6446a86a2982253a2637c061e0"}}
name: Restore package cache
- restore_cache:
key: package-4571f72423cae365aaad07ec1a16fffe7aaa468e-{{checksum ".buildcache/cache-keys/package-4571f72423cae365aaad07ec1a16fffe7aaa468e"}}
key: package-e13e5614c5583baf504ebe29fbf99c21f9447926-{{checksum ".buildcache/cache-keys/package-e13e5614c5583baf504ebe29fbf99c21f9447926"}}
name: Restore package cache
- restore_cache:
key: package-93124fe6bd57406690ddeef8d33ee90d67f3eb04-{{checksum ".buildcache/cache-keys/package-93124fe6bd57406690ddeef8d33ee90d67f3eb04"}}
key: package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9-{{checksum ".buildcache/cache-keys/package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9"}}
name: Restore package cache
- restore_cache:
key: package-a60bb9fef193b738fd139723936b23ea38b18d25-{{checksum ".buildcache/cache-keys/package-a60bb9fef193b738fd139723936b23ea38b18d25"}}
key: package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8-{{checksum ".buildcache/cache-keys/package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8"}}
name: Restore package cache
- run:
command: make package-meta-all
@ -83,20 +83,20 @@ jobs:
command: ls -lahR .buildcache
name: List Build Cache
- run:
command: cp packages*.lock/pkgs.yml lockfile-591648cfcfc4fec2.yml
command: cp packages*.lock/pkgs.yml lockfile-fd66eb9f99de501d.yml
name: Update Lockfile Name
- run:
command: tar -czf packages-591648cfcfc4fec2.tar.gz .buildcache/packages lockfile-591648cfcfc4fec2.yml
command: tar -czf packages-fd66eb9f99de501d.tar.gz .buildcache/packages lockfile-fd66eb9f99de501d.yml
name: Create Raw Package Tarball
- run:
command: tar -czf meta-591648cfcfc4fec2.tar.gz .buildcache/packages/store/*.json lockfile-591648cfcfc4fec2.yml
command: tar -czf meta-fd66eb9f99de501d.tar.gz .buildcache/packages/store/*.json lockfile-fd66eb9f99de501d.yml
name: Create Metadata Tarball
- store_artifacts:
path: lockfile-591648cfcfc4fec2.yml
path: lockfile-fd66eb9f99de501d.yml
- store_artifacts:
path: packages-591648cfcfc4fec2.tar.gz
path: packages-fd66eb9f99de501d.tar.gz
- store_artifacts:
path: meta-591648cfcfc4fec2.tar.gz
path: meta-fd66eb9f99de501d.tar.gz
- store_artifacts:
path: .buildcache/packages
environment:
@ -111,7 +111,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 71fa601616a225899af09c8369ed9fc2d6d998e2
- PACKAGE_SPEC_ID: ddd70200153c0e65302afe88a629b4440e8eebfa
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -125,7 +125,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-71fa601616a225899af09c8369ed9fc2d6d998e2-{{checksum ".buildcache/cache-keys/package-71fa601616a225899af09c8369ed9fc2d6d998e2"}}
key: package-ddd70200153c0e65302afe88a629b4440e8eebfa-{{checksum ".buildcache/cache-keys/package-ddd70200153c0e65302afe88a629b4440e8eebfa"}}
name: Restore package cache
- run:
command: |2
@ -171,7 +171,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-71fa601616a225899af09c8369ed9fc2d6d998e2-{{checksum ".buildcache/cache-keys/package-71fa601616a225899af09c8369ed9fc2d6d998e2"}}
key: package-ddd70200153c0e65302afe88a629b4440e8eebfa-{{checksum ".buildcache/cache-keys/package-ddd70200153c0e65302afe88a629b4440e8eebfa"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -183,7 +183,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 93124fe6bd57406690ddeef8d33ee90d67f3eb04
- PACKAGE_SPEC_ID: 3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -197,7 +197,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-93124fe6bd57406690ddeef8d33ee90d67f3eb04-{{checksum ".buildcache/cache-keys/package-93124fe6bd57406690ddeef8d33ee90d67f3eb04"}}
key: package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9-{{checksum ".buildcache/cache-keys/package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9"}}
name: Restore package cache
- run:
command: |2
@ -243,7 +243,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-93124fe6bd57406690ddeef8d33ee90d67f3eb04-{{checksum ".buildcache/cache-keys/package-93124fe6bd57406690ddeef8d33ee90d67f3eb04"}}
key: package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9-{{checksum ".buildcache/cache-keys/package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -378,7 +378,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 384724ac89d695a73abb78aad60acd3c7c2c4270
- PACKAGE_SPEC_ID: a1ae7292c28d5e629eea144b2fafb6f234ca4ffa
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -392,7 +392,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-384724ac89d695a73abb78aad60acd3c7c2c4270-{{checksum ".buildcache/cache-keys/package-384724ac89d695a73abb78aad60acd3c7c2c4270"}}
key: package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa-{{checksum ".buildcache/cache-keys/package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa"}}
name: Restore package cache
- run:
command: |2
@ -438,7 +438,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-384724ac89d695a73abb78aad60acd3c7c2c4270-{{checksum ".buildcache/cache-keys/package-384724ac89d695a73abb78aad60acd3c7c2c4270"}}
key: package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa-{{checksum ".buildcache/cache-keys/package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -450,7 +450,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 1efe1513c2b57d32392ed4a4601de1699c813457
- PACKAGE_SPEC_ID: b734ede0c13f22ffc1835626300d3b3226acaf53
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -464,7 +464,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-1efe1513c2b57d32392ed4a4601de1699c813457-{{checksum ".buildcache/cache-keys/package-1efe1513c2b57d32392ed4a4601de1699c813457"}}
key: package-b734ede0c13f22ffc1835626300d3b3226acaf53-{{checksum ".buildcache/cache-keys/package-b734ede0c13f22ffc1835626300d3b3226acaf53"}}
name: Restore package cache
- run:
command: |2
@ -510,7 +510,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-1efe1513c2b57d32392ed4a4601de1699c813457-{{checksum ".buildcache/cache-keys/package-1efe1513c2b57d32392ed4a4601de1699c813457"}}
key: package-b734ede0c13f22ffc1835626300d3b3226acaf53-{{checksum ".buildcache/cache-keys/package-b734ede0c13f22ffc1835626300d3b3226acaf53"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -533,7 +533,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 30ba4053218e6b6a3f39b9d67cc430f9385e70cb
- PACKAGE_SPEC_ID: 1439f62f659d115a6675bef8de63d7a576370da6
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -547,7 +547,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb-{{checksum ".buildcache/cache-keys/package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb"}}
key: package-1439f62f659d115a6675bef8de63d7a576370da6-{{checksum ".buildcache/cache-keys/package-1439f62f659d115a6675bef8de63d7a576370da6"}}
name: Restore package cache
- run:
command: |2
@ -593,7 +593,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb-{{checksum ".buildcache/cache-keys/package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb"}}
key: package-1439f62f659d115a6675bef8de63d7a576370da6-{{checksum ".buildcache/cache-keys/package-1439f62f659d115a6675bef8de63d7a576370da6"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -605,7 +605,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 97f48b65a2b1d85a686a4799e8aff6926cfbe04f
- PACKAGE_SPEC_ID: dca0ba02c8e7351c77442c86523f121a8c83039e
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -619,7 +619,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f-{{checksum ".buildcache/cache-keys/package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f"}}
key: package-dca0ba02c8e7351c77442c86523f121a8c83039e-{{checksum ".buildcache/cache-keys/package-dca0ba02c8e7351c77442c86523f121a8c83039e"}}
name: Restore package cache
- run:
command: |2
@ -665,7 +665,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f-{{checksum ".buildcache/cache-keys/package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f"}}
key: package-dca0ba02c8e7351c77442c86523f121a8c83039e-{{checksum ".buildcache/cache-keys/package-dca0ba02c8e7351c77442c86523f121a8c83039e"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -677,7 +677,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: b6d16e35ea34fa5749ed92c8062e1cea96fd24f4
- PACKAGE_SPEC_ID: 72b242aa7004a2a706bd894df71c002671e8e425
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -691,7 +691,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4-{{checksum ".buildcache/cache-keys/package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4"}}
key: package-72b242aa7004a2a706bd894df71c002671e8e425-{{checksum ".buildcache/cache-keys/package-72b242aa7004a2a706bd894df71c002671e8e425"}}
name: Restore package cache
- run:
command: |2
@ -737,7 +737,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4-{{checksum ".buildcache/cache-keys/package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4"}}
key: package-72b242aa7004a2a706bd894df71c002671e8e425-{{checksum ".buildcache/cache-keys/package-72b242aa7004a2a706bd894df71c002671e8e425"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -749,7 +749,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 291ec533f804015a31ec89443c1100a2ae526856
- PACKAGE_SPEC_ID: 489303d43b63583b456fef92eeef9806a25ef98d
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -763,7 +763,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-291ec533f804015a31ec89443c1100a2ae526856-{{checksum ".buildcache/cache-keys/package-291ec533f804015a31ec89443c1100a2ae526856"}}
key: package-489303d43b63583b456fef92eeef9806a25ef98d-{{checksum ".buildcache/cache-keys/package-489303d43b63583b456fef92eeef9806a25ef98d"}}
name: Restore package cache
- run:
command: |2
@ -809,7 +809,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-291ec533f804015a31ec89443c1100a2ae526856-{{checksum ".buildcache/cache-keys/package-291ec533f804015a31ec89443c1100a2ae526856"}}
key: package-489303d43b63583b456fef92eeef9806a25ef98d-{{checksum ".buildcache/cache-keys/package-489303d43b63583b456fef92eeef9806a25ef98d"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -821,7 +821,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 809c3d22aa07bb5c66dca99d44efdd2bb37f2525
- PACKAGE_SPEC_ID: 192814968c68e1c819d216b95925afb4099aaa07
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -835,7 +835,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525-{{checksum ".buildcache/cache-keys/package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525"}}
key: package-192814968c68e1c819d216b95925afb4099aaa07-{{checksum ".buildcache/cache-keys/package-192814968c68e1c819d216b95925afb4099aaa07"}}
name: Restore package cache
- run:
command: |2
@ -881,7 +881,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525-{{checksum ".buildcache/cache-keys/package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525"}}
key: package-192814968c68e1c819d216b95925afb4099aaa07-{{checksum ".buildcache/cache-keys/package-192814968c68e1c819d216b95925afb4099aaa07"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -903,7 +903,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 4571f72423cae365aaad07ec1a16fffe7aaa468e
- PACKAGE_SPEC_ID: e13e5614c5583baf504ebe29fbf99c21f9447926
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -917,7 +917,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-4571f72423cae365aaad07ec1a16fffe7aaa468e-{{checksum ".buildcache/cache-keys/package-4571f72423cae365aaad07ec1a16fffe7aaa468e"}}
key: package-e13e5614c5583baf504ebe29fbf99c21f9447926-{{checksum ".buildcache/cache-keys/package-e13e5614c5583baf504ebe29fbf99c21f9447926"}}
name: Restore package cache
- run:
command: |2
@ -963,7 +963,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-4571f72423cae365aaad07ec1a16fffe7aaa468e-{{checksum ".buildcache/cache-keys/package-4571f72423cae365aaad07ec1a16fffe7aaa468e"}}
key: package-e13e5614c5583baf504ebe29fbf99c21f9447926-{{checksum ".buildcache/cache-keys/package-e13e5614c5583baf504ebe29fbf99c21f9447926"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -1125,7 +1125,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: e5c674f011608ff0ec51285ea342fe7a95f803e5
- PACKAGE_SPEC_ID: 744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -1139,7 +1139,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-e5c674f011608ff0ec51285ea342fe7a95f803e5-{{checksum ".buildcache/cache-keys/package-e5c674f011608ff0ec51285ea342fe7a95f803e5"}}
key: package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d-{{checksum ".buildcache/cache-keys/package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d"}}
name: Restore package cache
- run:
command: |2
@ -1185,7 +1185,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-e5c674f011608ff0ec51285ea342fe7a95f803e5-{{checksum ".buildcache/cache-keys/package-e5c674f011608ff0ec51285ea342fe7a95f803e5"}}
key: package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d-{{checksum ".buildcache/cache-keys/package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -1197,7 +1197,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: d2dbe866712854d51e105c9b8b33ff5e22d1287c
- PACKAGE_SPEC_ID: 394f426443d3ce6446a86a2982253a2637c061e0
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -1211,7 +1211,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-d2dbe866712854d51e105c9b8b33ff5e22d1287c-{{checksum ".buildcache/cache-keys/package-d2dbe866712854d51e105c9b8b33ff5e22d1287c"}}
key: package-394f426443d3ce6446a86a2982253a2637c061e0-{{checksum ".buildcache/cache-keys/package-394f426443d3ce6446a86a2982253a2637c061e0"}}
name: Restore package cache
- run:
command: |2
@ -1257,7 +1257,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-d2dbe866712854d51e105c9b8b33ff5e22d1287c-{{checksum ".buildcache/cache-keys/package-d2dbe866712854d51e105c9b8b33ff5e22d1287c"}}
key: package-394f426443d3ce6446a86a2982253a2637c061e0-{{checksum ".buildcache/cache-keys/package-394f426443d3ce6446a86a2982253a2637c061e0"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -1269,7 +1269,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: 45fe69b6571284299223e8245f29c40ffd9c43dd
- PACKAGE_SPEC_ID: b9d9cc299dc96f89f71285c520b4cd244879207a
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -1283,7 +1283,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-45fe69b6571284299223e8245f29c40ffd9c43dd-{{checksum ".buildcache/cache-keys/package-45fe69b6571284299223e8245f29c40ffd9c43dd"}}
key: package-b9d9cc299dc96f89f71285c520b4cd244879207a-{{checksum ".buildcache/cache-keys/package-b9d9cc299dc96f89f71285c520b4cd244879207a"}}
name: Restore package cache
- run:
command: |2
@ -1329,7 +1329,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-45fe69b6571284299223e8245f29c40ffd9c43dd-{{checksum ".buildcache/cache-keys/package-45fe69b6571284299223e8245f29c40ffd9c43dd"}}
key: package-b9d9cc299dc96f89f71285c520b4cd244879207a-{{checksum ".buildcache/cache-keys/package-b9d9cc299dc96f89f71285c520b4cd244879207a"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -1341,7 +1341,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: c8401fcf82b86f62e39516bc847423296f351dcf
- PACKAGE_SPEC_ID: 2409a41c5c5a434b5dbf77d29deb31a24c825dea
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -1355,7 +1355,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-c8401fcf82b86f62e39516bc847423296f351dcf-{{checksum ".buildcache/cache-keys/package-c8401fcf82b86f62e39516bc847423296f351dcf"}}
key: package-2409a41c5c5a434b5dbf77d29deb31a24c825dea-{{checksum ".buildcache/cache-keys/package-2409a41c5c5a434b5dbf77d29deb31a24c825dea"}}
name: Restore package cache
- run:
command: |2
@ -1401,7 +1401,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-c8401fcf82b86f62e39516bc847423296f351dcf-{{checksum ".buildcache/cache-keys/package-c8401fcf82b86f62e39516bc847423296f351dcf"}}
key: package-2409a41c5c5a434b5dbf77d29deb31a24c825dea-{{checksum ".buildcache/cache-keys/package-2409a41c5c5a434b5dbf77d29deb31a24c825dea"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -1413,7 +1413,7 @@ jobs:
- AUTO_INSTALL_TOOLS: 'YES'
- BUILDKIT_PROGRESS: plain
- PRODUCT_REVISION: ''
- PACKAGE_SPEC_ID: a60bb9fef193b738fd139723936b23ea38b18d25
- PACKAGE_SPEC_ID: 7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8
steps:
- setup_remote_docker:
docker_layer_caching: false
@ -1427,7 +1427,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-a60bb9fef193b738fd139723936b23ea38b18d25-{{checksum ".buildcache/cache-keys/package-a60bb9fef193b738fd139723936b23ea38b18d25"}}
key: package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8-{{checksum ".buildcache/cache-keys/package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8"}}
name: Restore package cache
- run:
command: |2
@ -1473,7 +1473,7 @@ jobs:
command: ls -lahR .buildcache/packages
name: List packages
- save_cache:
key: package-a60bb9fef193b738fd139723936b23ea38b18d25-{{checksum ".buildcache/cache-keys/package-a60bb9fef193b738fd139723936b23ea38b18d25"}}
key: package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8-{{checksum ".buildcache/cache-keys/package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8"}}
name: Save package cache
paths:
- .buildcache/packages/store
@ -1488,7 +1488,7 @@ jobs:
name: Run SQL PgTap Tests
working_directory: ~/boundary
workflows:
build-591648cfcfc4fec2:
build-fd66eb9f99de501d:
jobs:
- build-common-layers
- darwin_amd64_package:

@ -136,49 +136,49 @@ jobs:
command: mkdir -p .buildcache && echo "*" > .buildcache/.gitignore
name: Ignore .buildcache
- restore_cache:
key: package-71fa601616a225899af09c8369ed9fc2d6d998e2-{{checksum ".buildcache/cache-keys/package-71fa601616a225899af09c8369ed9fc2d6d998e2"}}
key: package-ddd70200153c0e65302afe88a629b4440e8eebfa-{{checksum ".buildcache/cache-keys/package-ddd70200153c0e65302afe88a629b4440e8eebfa"}}
name: Restore package cache
- restore_cache:
key: package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f-{{checksum ".buildcache/cache-keys/package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f"}}
key: package-dca0ba02c8e7351c77442c86523f121a8c83039e-{{checksum ".buildcache/cache-keys/package-dca0ba02c8e7351c77442c86523f121a8c83039e"}}
name: Restore package cache
- restore_cache:
key: package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4-{{checksum ".buildcache/cache-keys/package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4"}}
key: package-72b242aa7004a2a706bd894df71c002671e8e425-{{checksum ".buildcache/cache-keys/package-72b242aa7004a2a706bd894df71c002671e8e425"}}
name: Restore package cache
- restore_cache:
key: package-c8401fcf82b86f62e39516bc847423296f351dcf-{{checksum ".buildcache/cache-keys/package-c8401fcf82b86f62e39516bc847423296f351dcf"}}
key: package-2409a41c5c5a434b5dbf77d29deb31a24c825dea-{{checksum ".buildcache/cache-keys/package-2409a41c5c5a434b5dbf77d29deb31a24c825dea"}}
name: Restore package cache
- restore_cache:
key: package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb-{{checksum ".buildcache/cache-keys/package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb"}}
key: package-1439f62f659d115a6675bef8de63d7a576370da6-{{checksum ".buildcache/cache-keys/package-1439f62f659d115a6675bef8de63d7a576370da6"}}
name: Restore package cache
- restore_cache:
key: package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525-{{checksum ".buildcache/cache-keys/package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525"}}
key: package-192814968c68e1c819d216b95925afb4099aaa07-{{checksum ".buildcache/cache-keys/package-192814968c68e1c819d216b95925afb4099aaa07"}}
name: Restore package cache
- restore_cache:
key: package-1efe1513c2b57d32392ed4a4601de1699c813457-{{checksum ".buildcache/cache-keys/package-1efe1513c2b57d32392ed4a4601de1699c813457"}}
key: package-b734ede0c13f22ffc1835626300d3b3226acaf53-{{checksum ".buildcache/cache-keys/package-b734ede0c13f22ffc1835626300d3b3226acaf53"}}
name: Restore package cache
- restore_cache:
key: package-384724ac89d695a73abb78aad60acd3c7c2c4270-{{checksum ".buildcache/cache-keys/package-384724ac89d695a73abb78aad60acd3c7c2c4270"}}
key: package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa-{{checksum ".buildcache/cache-keys/package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa"}}
name: Restore package cache
- restore_cache:
key: package-45fe69b6571284299223e8245f29c40ffd9c43dd-{{checksum ".buildcache/cache-keys/package-45fe69b6571284299223e8245f29c40ffd9c43dd"}}
key: package-b9d9cc299dc96f89f71285c520b4cd244879207a-{{checksum ".buildcache/cache-keys/package-b9d9cc299dc96f89f71285c520b4cd244879207a"}}
name: Restore package cache
- restore_cache:
key: package-e5c674f011608ff0ec51285ea342fe7a95f803e5-{{checksum ".buildcache/cache-keys/package-e5c674f011608ff0ec51285ea342fe7a95f803e5"}}
key: package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d-{{checksum ".buildcache/cache-keys/package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d"}}
name: Restore package cache
- restore_cache:
key: package-291ec533f804015a31ec89443c1100a2ae526856-{{checksum ".buildcache/cache-keys/package-291ec533f804015a31ec89443c1100a2ae526856"}}
key: package-489303d43b63583b456fef92eeef9806a25ef98d-{{checksum ".buildcache/cache-keys/package-489303d43b63583b456fef92eeef9806a25ef98d"}}
name: Restore package cache
- restore_cache:
key: package-d2dbe866712854d51e105c9b8b33ff5e22d1287c-{{checksum ".buildcache/cache-keys/package-d2dbe866712854d51e105c9b8b33ff5e22d1287c"}}
key: package-394f426443d3ce6446a86a2982253a2637c061e0-{{checksum ".buildcache/cache-keys/package-394f426443d3ce6446a86a2982253a2637c061e0"}}
name: Restore package cache
- restore_cache:
key: package-4571f72423cae365aaad07ec1a16fffe7aaa468e-{{checksum ".buildcache/cache-keys/package-4571f72423cae365aaad07ec1a16fffe7aaa468e"}}
key: package-e13e5614c5583baf504ebe29fbf99c21f9447926-{{checksum ".buildcache/cache-keys/package-e13e5614c5583baf504ebe29fbf99c21f9447926"}}
name: Restore package cache
- restore_cache:
key: package-93124fe6bd57406690ddeef8d33ee90d67f3eb04-{{checksum ".buildcache/cache-keys/package-93124fe6bd57406690ddeef8d33ee90d67f3eb04"}}
key: package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9-{{checksum ".buildcache/cache-keys/package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9"}}
name: Restore package cache
- restore_cache:
key: package-a60bb9fef193b738fd139723936b23ea38b18d25-{{checksum ".buildcache/cache-keys/package-a60bb9fef193b738fd139723936b23ea38b18d25"}}
key: package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8-{{checksum ".buildcache/cache-keys/package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8"}}
name: Restore package cache
- run:
command: make package-meta-all
@ -190,20 +190,20 @@ jobs:
command: ls -lahR .buildcache
name: List Build Cache
- run:
command: cp packages*.lock/pkgs.yml lockfile-591648cfcfc4fec2.yml
command: cp packages*.lock/pkgs.yml lockfile-fd66eb9f99de501d.yml
name: Update Lockfile Name
- run:
command: tar -czf packages-591648cfcfc4fec2.tar.gz .buildcache/packages lockfile-591648cfcfc4fec2.yml
command: tar -czf packages-fd66eb9f99de501d.tar.gz .buildcache/packages lockfile-fd66eb9f99de501d.yml
name: Create Raw Package Tarball
- run:
command: tar -czf meta-591648cfcfc4fec2.tar.gz .buildcache/packages/store/*.json lockfile-591648cfcfc4fec2.yml
command: tar -czf meta-fd66eb9f99de501d.tar.gz .buildcache/packages/store/*.json lockfile-fd66eb9f99de501d.yml
name: Create Metadata Tarball
- store_artifacts:
path: lockfile-591648cfcfc4fec2.yml
path: lockfile-fd66eb9f99de501d.yml
- store_artifacts:
path: packages-591648cfcfc4fec2.tar.gz
path: packages-fd66eb9f99de501d.tar.gz
- store_artifacts:
path: meta-591648cfcfc4fec2.tar.gz
path: meta-fd66eb9f99de501d.tar.gz
- store_artifacts:
path: .buildcache/packages
darwin_amd64_package:
@ -221,7 +221,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-71fa601616a225899af09c8369ed9fc2d6d998e2-{{checksum ".buildcache/cache-keys/package-71fa601616a225899af09c8369ed9fc2d6d998e2"}}
key: package-ddd70200153c0e65302afe88a629b4440e8eebfa-{{checksum ".buildcache/cache-keys/package-ddd70200153c0e65302afe88a629b4440e8eebfa"}}
name: Restore package cache
- run:
command: |2
@ -269,10 +269,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-71fa601616a225899af09c8369ed9fc2d6d998e2-{{checksum ".buildcache/cache-keys/package-71fa601616a225899af09c8369ed9fc2d6d998e2"}}
key: package-ddd70200153c0e65302afe88a629b4440e8eebfa-{{checksum ".buildcache/cache-keys/package-ddd70200153c0e65302afe88a629b4440e8eebfa"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 71fa601616a225899af09c8369ed9fc2d6d998e2
PACKAGE_SPEC_ID: ddd70200153c0e65302afe88a629b4440e8eebfa
freebsd_386_package:
executor: builder
steps:
@ -288,7 +288,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f-{{checksum ".buildcache/cache-keys/package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f"}}
key: package-dca0ba02c8e7351c77442c86523f121a8c83039e-{{checksum ".buildcache/cache-keys/package-dca0ba02c8e7351c77442c86523f121a8c83039e"}}
name: Restore package cache
- run:
command: |2
@ -336,10 +336,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f-{{checksum ".buildcache/cache-keys/package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f"}}
key: package-dca0ba02c8e7351c77442c86523f121a8c83039e-{{checksum ".buildcache/cache-keys/package-dca0ba02c8e7351c77442c86523f121a8c83039e"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 97f48b65a2b1d85a686a4799e8aff6926cfbe04f
PACKAGE_SPEC_ID: dca0ba02c8e7351c77442c86523f121a8c83039e
freebsd_amd64_package:
executor: builder
steps:
@ -355,7 +355,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4-{{checksum ".buildcache/cache-keys/package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4"}}
key: package-72b242aa7004a2a706bd894df71c002671e8e425-{{checksum ".buildcache/cache-keys/package-72b242aa7004a2a706bd894df71c002671e8e425"}}
name: Restore package cache
- run:
command: |2
@ -403,10 +403,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4-{{checksum ".buildcache/cache-keys/package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4"}}
key: package-72b242aa7004a2a706bd894df71c002671e8e425-{{checksum ".buildcache/cache-keys/package-72b242aa7004a2a706bd894df71c002671e8e425"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: b6d16e35ea34fa5749ed92c8062e1cea96fd24f4
PACKAGE_SPEC_ID: 72b242aa7004a2a706bd894df71c002671e8e425
freebsd_arm_package:
executor: builder
steps:
@ -422,7 +422,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-c8401fcf82b86f62e39516bc847423296f351dcf-{{checksum ".buildcache/cache-keys/package-c8401fcf82b86f62e39516bc847423296f351dcf"}}
key: package-2409a41c5c5a434b5dbf77d29deb31a24c825dea-{{checksum ".buildcache/cache-keys/package-2409a41c5c5a434b5dbf77d29deb31a24c825dea"}}
name: Restore package cache
- run:
command: |2
@ -470,10 +470,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-c8401fcf82b86f62e39516bc847423296f351dcf-{{checksum ".buildcache/cache-keys/package-c8401fcf82b86f62e39516bc847423296f351dcf"}}
key: package-2409a41c5c5a434b5dbf77d29deb31a24c825dea-{{checksum ".buildcache/cache-keys/package-2409a41c5c5a434b5dbf77d29deb31a24c825dea"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: c8401fcf82b86f62e39516bc847423296f351dcf
PACKAGE_SPEC_ID: 2409a41c5c5a434b5dbf77d29deb31a24c825dea
linux_386_package:
executor: builder
steps:
@ -489,7 +489,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb-{{checksum ".buildcache/cache-keys/package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb"}}
key: package-1439f62f659d115a6675bef8de63d7a576370da6-{{checksum ".buildcache/cache-keys/package-1439f62f659d115a6675bef8de63d7a576370da6"}}
name: Restore package cache
- run:
command: |2
@ -537,10 +537,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb-{{checksum ".buildcache/cache-keys/package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb"}}
key: package-1439f62f659d115a6675bef8de63d7a576370da6-{{checksum ".buildcache/cache-keys/package-1439f62f659d115a6675bef8de63d7a576370da6"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 30ba4053218e6b6a3f39b9d67cc430f9385e70cb
PACKAGE_SPEC_ID: 1439f62f659d115a6675bef8de63d7a576370da6
linux_amd64_package:
executor: builder
steps:
@ -556,7 +556,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525-{{checksum ".buildcache/cache-keys/package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525"}}
key: package-192814968c68e1c819d216b95925afb4099aaa07-{{checksum ".buildcache/cache-keys/package-192814968c68e1c819d216b95925afb4099aaa07"}}
name: Restore package cache
- run:
command: |2
@ -604,10 +604,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525-{{checksum ".buildcache/cache-keys/package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525"}}
key: package-192814968c68e1c819d216b95925afb4099aaa07-{{checksum ".buildcache/cache-keys/package-192814968c68e1c819d216b95925afb4099aaa07"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 809c3d22aa07bb5c66dca99d44efdd2bb37f2525
PACKAGE_SPEC_ID: 192814968c68e1c819d216b95925afb4099aaa07
linux_arm_package:
executor: builder
steps:
@ -623,7 +623,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-1efe1513c2b57d32392ed4a4601de1699c813457-{{checksum ".buildcache/cache-keys/package-1efe1513c2b57d32392ed4a4601de1699c813457"}}
key: package-b734ede0c13f22ffc1835626300d3b3226acaf53-{{checksum ".buildcache/cache-keys/package-b734ede0c13f22ffc1835626300d3b3226acaf53"}}
name: Restore package cache
- run:
command: |2
@ -671,10 +671,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-1efe1513c2b57d32392ed4a4601de1699c813457-{{checksum ".buildcache/cache-keys/package-1efe1513c2b57d32392ed4a4601de1699c813457"}}
key: package-b734ede0c13f22ffc1835626300d3b3226acaf53-{{checksum ".buildcache/cache-keys/package-b734ede0c13f22ffc1835626300d3b3226acaf53"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 1efe1513c2b57d32392ed4a4601de1699c813457
PACKAGE_SPEC_ID: b734ede0c13f22ffc1835626300d3b3226acaf53
linux_arm64_package:
executor: builder
steps:
@ -690,7 +690,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-384724ac89d695a73abb78aad60acd3c7c2c4270-{{checksum ".buildcache/cache-keys/package-384724ac89d695a73abb78aad60acd3c7c2c4270"}}
key: package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa-{{checksum ".buildcache/cache-keys/package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa"}}
name: Restore package cache
- run:
command: |2
@ -738,10 +738,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-384724ac89d695a73abb78aad60acd3c7c2c4270-{{checksum ".buildcache/cache-keys/package-384724ac89d695a73abb78aad60acd3c7c2c4270"}}
key: package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa-{{checksum ".buildcache/cache-keys/package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 384724ac89d695a73abb78aad60acd3c7c2c4270
PACKAGE_SPEC_ID: a1ae7292c28d5e629eea144b2fafb6f234ca4ffa
netbsd_386_package:
executor: builder
steps:
@ -757,7 +757,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-45fe69b6571284299223e8245f29c40ffd9c43dd-{{checksum ".buildcache/cache-keys/package-45fe69b6571284299223e8245f29c40ffd9c43dd"}}
key: package-b9d9cc299dc96f89f71285c520b4cd244879207a-{{checksum ".buildcache/cache-keys/package-b9d9cc299dc96f89f71285c520b4cd244879207a"}}
name: Restore package cache
- run:
command: |2
@ -805,10 +805,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-45fe69b6571284299223e8245f29c40ffd9c43dd-{{checksum ".buildcache/cache-keys/package-45fe69b6571284299223e8245f29c40ffd9c43dd"}}
key: package-b9d9cc299dc96f89f71285c520b4cd244879207a-{{checksum ".buildcache/cache-keys/package-b9d9cc299dc96f89f71285c520b4cd244879207a"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 45fe69b6571284299223e8245f29c40ffd9c43dd
PACKAGE_SPEC_ID: b9d9cc299dc96f89f71285c520b4cd244879207a
netbsd_amd64_package:
executor: builder
steps:
@ -824,7 +824,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-e5c674f011608ff0ec51285ea342fe7a95f803e5-{{checksum ".buildcache/cache-keys/package-e5c674f011608ff0ec51285ea342fe7a95f803e5"}}
key: package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d-{{checksum ".buildcache/cache-keys/package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d"}}
name: Restore package cache
- run:
command: |2
@ -872,10 +872,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-e5c674f011608ff0ec51285ea342fe7a95f803e5-{{checksum ".buildcache/cache-keys/package-e5c674f011608ff0ec51285ea342fe7a95f803e5"}}
key: package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d-{{checksum ".buildcache/cache-keys/package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: e5c674f011608ff0ec51285ea342fe7a95f803e5
PACKAGE_SPEC_ID: 744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d
openbsd_386_package:
executor: builder
steps:
@ -891,7 +891,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-291ec533f804015a31ec89443c1100a2ae526856-{{checksum ".buildcache/cache-keys/package-291ec533f804015a31ec89443c1100a2ae526856"}}
key: package-489303d43b63583b456fef92eeef9806a25ef98d-{{checksum ".buildcache/cache-keys/package-489303d43b63583b456fef92eeef9806a25ef98d"}}
name: Restore package cache
- run:
command: |2
@ -939,10 +939,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-291ec533f804015a31ec89443c1100a2ae526856-{{checksum ".buildcache/cache-keys/package-291ec533f804015a31ec89443c1100a2ae526856"}}
key: package-489303d43b63583b456fef92eeef9806a25ef98d-{{checksum ".buildcache/cache-keys/package-489303d43b63583b456fef92eeef9806a25ef98d"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 291ec533f804015a31ec89443c1100a2ae526856
PACKAGE_SPEC_ID: 489303d43b63583b456fef92eeef9806a25ef98d
openbsd_amd64_package:
executor: builder
steps:
@ -958,7 +958,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-d2dbe866712854d51e105c9b8b33ff5e22d1287c-{{checksum ".buildcache/cache-keys/package-d2dbe866712854d51e105c9b8b33ff5e22d1287c"}}
key: package-394f426443d3ce6446a86a2982253a2637c061e0-{{checksum ".buildcache/cache-keys/package-394f426443d3ce6446a86a2982253a2637c061e0"}}
name: Restore package cache
- run:
command: |2
@ -1006,10 +1006,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-d2dbe866712854d51e105c9b8b33ff5e22d1287c-{{checksum ".buildcache/cache-keys/package-d2dbe866712854d51e105c9b8b33ff5e22d1287c"}}
key: package-394f426443d3ce6446a86a2982253a2637c061e0-{{checksum ".buildcache/cache-keys/package-394f426443d3ce6446a86a2982253a2637c061e0"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: d2dbe866712854d51e105c9b8b33ff5e22d1287c
PACKAGE_SPEC_ID: 394f426443d3ce6446a86a2982253a2637c061e0
solaris_amd64_package:
executor: builder
steps:
@ -1025,7 +1025,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-4571f72423cae365aaad07ec1a16fffe7aaa468e-{{checksum ".buildcache/cache-keys/package-4571f72423cae365aaad07ec1a16fffe7aaa468e"}}
key: package-e13e5614c5583baf504ebe29fbf99c21f9447926-{{checksum ".buildcache/cache-keys/package-e13e5614c5583baf504ebe29fbf99c21f9447926"}}
name: Restore package cache
- run:
command: |2
@ -1073,10 +1073,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-4571f72423cae365aaad07ec1a16fffe7aaa468e-{{checksum ".buildcache/cache-keys/package-4571f72423cae365aaad07ec1a16fffe7aaa468e"}}
key: package-e13e5614c5583baf504ebe29fbf99c21f9447926-{{checksum ".buildcache/cache-keys/package-e13e5614c5583baf504ebe29fbf99c21f9447926"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 4571f72423cae365aaad07ec1a16fffe7aaa468e
PACKAGE_SPEC_ID: e13e5614c5583baf504ebe29fbf99c21f9447926
windows_386_package:
executor: builder
steps:
@ -1092,7 +1092,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-93124fe6bd57406690ddeef8d33ee90d67f3eb04-{{checksum ".buildcache/cache-keys/package-93124fe6bd57406690ddeef8d33ee90d67f3eb04"}}
key: package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9-{{checksum ".buildcache/cache-keys/package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9"}}
name: Restore package cache
- run:
command: |2
@ -1140,10 +1140,10 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-93124fe6bd57406690ddeef8d33ee90d67f3eb04-{{checksum ".buildcache/cache-keys/package-93124fe6bd57406690ddeef8d33ee90d67f3eb04"}}
key: package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9-{{checksum ".buildcache/cache-keys/package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: 93124fe6bd57406690ddeef8d33ee90d67f3eb04
PACKAGE_SPEC_ID: 3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9
windows_amd64_package:
executor: builder
steps:
@ -1159,7 +1159,7 @@ jobs:
command: make -C packages*.lock write-package-cache-key
name: Write package cache key
- restore_cache:
key: package-a60bb9fef193b738fd139723936b23ea38b18d25-{{checksum ".buildcache/cache-keys/package-a60bb9fef193b738fd139723936b23ea38b18d25"}}
key: package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8-{{checksum ".buildcache/cache-keys/package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8"}}
name: Restore package cache
- run:
command: |2
@ -1207,12 +1207,12 @@ jobs:
- save_cache:
paths:
- .buildcache/packages/store
key: package-a60bb9fef193b738fd139723936b23ea38b18d25-{{checksum ".buildcache/cache-keys/package-a60bb9fef193b738fd139723936b23ea38b18d25"}}
key: package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8-{{checksum ".buildcache/cache-keys/package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8"}}
name: Save package cache
environment:
PACKAGE_SPEC_ID: a60bb9fef193b738fd139723936b23ea38b18d25
PACKAGE_SPEC_ID: 7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8
workflows:
build-591648cfcfc4fec2:
build-fd66eb9f99de501d:
jobs:
- build-common-layers: {}
- darwin_amd64_package:

@ -2,7 +2,7 @@
Canonical reference for changes, improvements, and bugfixes for Boundary.
## Next
## 0.6.1 (2021/09/14)
### Bug Fixes
@ -10,6 +10,10 @@ Canonical reference for changes, improvements, and bugfixes for Boundary.
`managed-group` would not be accepted as specific `type` values in grant
strings. Also, fix authorized actions not showing `credential-store` values in
project scope output. ([PR](https://github.com/hashicorp/boundary/pull/1524))
* actions: Fix `sessions` collection actions not being visible when reading a
scope ([PR](https://github.com/hashicorp/boundary/pull/1527))
* credential stores: Fix credential stores not showing authorized collection
actions ([PR](https://github.com/hashicorp/boundary/pull/1530))
## 0.6.0 (2021/09/03)

@ -181,7 +181,7 @@ install-go:
# Docker build and publish variables and targets
REGISTRY_NAME?=docker.io/hashicorp
IMAGE_NAME=boundary
VERSION?=0.6.0
VERSION?=0.6.1
IMAGE_TAG=$(REGISTRY_NAME)/$(IMAGE_NAME):$(VERSION)
IMAGE_TAG_DEV=$(REGISTRY_NAME)/$(IMAGE_NAME):latest-$(shell git rev-parse --short HEAD)
DOCKER_DIR=./docker

@ -1,6 +1,6 @@
FROM docker.mirror.hashicorp.services/alpine:3.13
ARG VERSION=0.6.0
ARG VERSION=0.6.1
LABEL name="Boundary" \
maintainer="HashiCorp Boundary Team <boundary@hashicorp.com>" \

@ -87,3 +87,31 @@ type Revoker interface {
// Revoke revokes the dynamic credentials issued for sessionid.
Revoke(ctx context.Context, sessionId string) error
}
// Password represents a secret password. Its String, GoString, and
// MarshalJSON methods (defined in this package) replace the value with a
// redaction placeholder so the secret is not disclosed by formatting
// verbs or JSON encoding.
type Password string

// PrivateKey represents a secret private key. Like Password, its String,
// GoString, and MarshalJSON methods redact the value.
type PrivateKey []byte

// UserPassword is a credential containing a username and a password.
type UserPassword interface {
	Credential
	Username() string
	Password() Password
}

// KeyPair is a credential containing a username and a private key.
type KeyPair interface {
	Credential
	Username() string
	Private() PrivateKey
}

// Certificate is a credential containing a certificate and the private key
// for the certificate.
type Certificate interface {
	Credential
	Certificate() []byte
	Private() PrivateKey
}

@ -0,0 +1,39 @@
package credential
import "encoding/json"
const (
redactedPassword = "[REDACTED: password]"
redactedPrivateKey = "[REDACTED: private key]"
)
// String returns a string with the password redacted. Implementing
// fmt.Stringer ensures %s/%v formatting never prints the real secret.
func (s Password) String() string {
	return redactedPassword
}
// GoString returns a string with the password redacted. Implementing
// fmt.GoStringer ensures %#v formatting never prints the real secret.
func (s Password) GoString() string {
	return redactedPassword
}
// MarshalJSON returns a JSON-encoded string with the password redacted,
// so the secret never leaks into JSON output (logs, API responses, etc.).
func (s Password) MarshalJSON() ([]byte, error) {
	return json.Marshal(redactedPassword)
}
// String returns a string with the private key redacted. Implementing
// fmt.Stringer ensures %s/%v formatting never prints the real secret.
func (s PrivateKey) String() string {
	return redactedPrivateKey
}
// GoString returns a string with the private key redacted. Implementing
// fmt.GoStringer ensures %#v formatting never prints the real secret.
func (s PrivateKey) GoString() string {
	return redactedPrivateKey
}
// MarshalJSON returns a JSON-encoded byte slice with the private key
// redacted. The placeholder is marshaled as a []byte to match the type's
// underlying kind (encoding/json base64-encodes byte-slice values).
func (s PrivateKey) MarshalJSON() ([]byte, error) {
	return json.Marshal([]byte(redactedPrivateKey))
}

@ -0,0 +1,132 @@
package credential
import (
"encoding/json"
"fmt"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestPassword_String verifies that both a direct call to String and
// fmt's %s verb produce the redaction placeholder, never the secret.
func TestPassword_String(t *testing.T) {
	t.Parallel()
	t.Run("redacted", func(t *testing.T) {
		assert := assert.New(t)
		const want = redactedPassword
		passwd := Password("special secret")
		assert.Equalf(want, passwd.String(), "Password.String() = %v, want %v", passwd.String(), want)
		// Verify stringer is called
		s := fmt.Sprintf("%s", passwd)
		assert.Equalf(want, s, "Password.String() = %v, want %v", s, want)
	})
}
// TestPassword_GoString verifies that both a direct call to GoString and
// fmt's %#v verb produce the redaction placeholder, never the secret.
func TestPassword_GoString(t *testing.T) {
	t.Parallel()
	t.Run("redacted", func(t *testing.T) {
		assert := assert.New(t)
		const want = redactedPassword
		passwd := Password("magic secret")
		assert.Equalf(want, passwd.GoString(), "Password.GoString() = %v, want %v", passwd.GoString(), want)
		// Verify gostringer is called
		s := fmt.Sprintf("%#v", passwd)
		assert.Equalf(want, s, "Password.GoString() = %v, want %v", s, want)
	})
}
// TestPassword_MarshalJSON verifies that JSON marshaling redacts the
// password, both when MarshalJSON is called directly and when a Password
// is a field of a marshaled struct (round-tripped through Unmarshal).
func TestPassword_MarshalJSON(t *testing.T) {
	t.Parallel()
	t.Run("redacted", func(t *testing.T) {
		assert, require := assert.New(t), require.New(t)
		want, err := json.Marshal(redactedPassword)
		require.NoError(err)
		passwd := Password("normal secret")
		got, err := passwd.MarshalJSON()
		require.NoError(err)
		assert.Equalf(want, got, "Password.MarshalJSON() = %s, want %s", got, want)
	})
	t.Run("within-struct", func(t *testing.T) {
		assert, require := assert.New(t), require.New(t)
		// After the JSON round trip the Password field should hold the
		// redaction placeholder while the plain string field keeps the
		// real value. (Direct assignment: the previous
		// fmt.Sprintf(`%s`, redactedPassword) was an identity no-op on a
		// string constant.)
		want := redactedPassword
		type secretContainer struct {
			P Password
			S string
		}
		testB := "my secret"
		secret := secretContainer{P: Password(testB), S: testB}
		m, err := json.Marshal(secret)
		require.NoError(err)
		var sec secretContainer
		err = json.Unmarshal(m, &sec)
		require.NoError(err)
		assert.Equal(Password(want), sec.P)
		assert.Equal(testB, sec.S)
	})
}
// TestPrivateKey_String verifies that both a direct call to String and
// fmt's %s verb produce the redaction placeholder, never the secret.
func TestPrivateKey_String(t *testing.T) {
	t.Parallel()
	t.Run("redacted", func(t *testing.T) {
		assert := assert.New(t)
		const want = redactedPrivateKey
		pk := PrivateKey("special secret")
		assert.Equalf(want, pk.String(), "PrivateKey.String() = %v, want %v", pk.String(), want)
		// Verify stringer is called
		s := fmt.Sprintf("%s", pk)
		assert.Equalf(want, s, "PrivateKey.String() = %v, want %v", s, want)
	})
}
// TestPrivateKey_GoString verifies that both a direct call to GoString and
// fmt's %#v verb produce the redaction placeholder, never the secret.
func TestPrivateKey_GoString(t *testing.T) {
	t.Parallel()
	t.Run("redacted", func(t *testing.T) {
		assert := assert.New(t)
		const want = redactedPrivateKey
		pk := PrivateKey("magic secret")
		assert.Equalf(want, pk.GoString(), "PrivateKey.GoString() = %v, want %v", pk.GoString(), want)
		// Verify gostringer is called
		s := fmt.Sprintf("%#v", pk)
		assert.Equalf(want, s, "PrivateKey.GoString() = %v, want %v", s, want)
	})
}
// TestPrivateKey_MarshalJSON verifies that JSON marshaling redacts the
// private key, both when MarshalJSON is called directly and when a
// PrivateKey is a field of a marshaled struct (round-tripped through
// Unmarshal). Note the placeholder marshals as a []byte, i.e. base64.
func TestPrivateKey_MarshalJSON(t *testing.T) {
	t.Parallel()
	t.Run("redacted", func(t *testing.T) {
		assert, require := assert.New(t), require.New(t)
		want, err := json.Marshal([]byte(redactedPrivateKey))
		require.NoError(err)
		pk := PrivateKey("normal secret")
		got, err := pk.MarshalJSON()
		require.NoError(err)
		assert.Equalf(want, got, "PrivateKey.MarshalJSON() = %s, want %s", got, want)
	})
	t.Run("within-struct", func(t *testing.T) {
		assert, require := assert.New(t), require.New(t)
		// After the JSON round trip the PrivateKey field should hold the
		// redaction placeholder while the plain []byte field keeps the
		// real value. (Direct assignment: the previous
		// fmt.Sprintf(`%s`, redactedPrivateKey) was an identity no-op on
		// a string constant.)
		want := redactedPrivateKey
		type secretContainer struct {
			S PrivateKey
			B []byte
		}
		testB := []byte("my secret")
		secret := secretContainer{S: testB, B: testB}
		m, err := json.Marshal(secret)
		require.NoError(err)
		var sec secretContainer
		err = json.Unmarshal(m, &sec)
		require.NoError(err)
		assert.Equal(PrivateKey(want), sec.S)
		assert.Equal(testB, sec.B)
	})
}

@ -0,0 +1,129 @@
begin;

  -- replaces check from internal/db/schema/migrations/postgres/0/60_wh_domain_types.up.sql
  -- so the sentinel values 'None' and 'Unknown' (inserted below) are valid wh_public_ids
  alter domain wh_public_id drop constraint wh_public_id_check;
  alter domain wh_public_id add constraint wh_public_id_check
  check(
    value = 'None'
    or
    value = 'Unknown'
    or
    length(trim(value)) > 10
  );

  create table wh_credential_dimension (
    -- random id generated using encode(digest(gen_random_bytes(16), 'sha256'), 'base64')
    -- this is done to prevent conflicts with rows in other clusters
    -- which enables warehouse data from multiple clusters to be loaded into a
    -- single database instance
    key                                        wh_dim_key primary key default wh_dim_key(),

    credential_purpose                         wh_dim_text,

    credential_library_id                      wh_public_id not null,
    credential_library_type                    wh_dim_text,
    credential_library_name                    wh_dim_text,
    credential_library_description             wh_dim_text,
    credential_library_vault_path              wh_dim_text,
    credential_library_vault_http_method       wh_dim_text,
    credential_library_vault_http_request_body wh_dim_text,

    credential_store_id                        wh_public_id not null,
    credential_store_type                      wh_dim_text,
    credential_store_name                      wh_dim_text,
    credential_store_description               wh_dim_text,
    credential_store_vault_namespace           wh_dim_text,
    credential_store_vault_address             wh_dim_text,

    target_id                                  wh_public_id not null,
    target_type                                wh_dim_text,
    target_name                                wh_dim_text,
    target_description                         wh_dim_text,
    target_default_port_number                 integer not null,
    target_session_max_seconds                 integer not null,
    target_session_connection_limit            integer not null,

    project_id                                 wt_scope_id not null,
    project_name                               wh_dim_text,
    project_description                        wh_dim_text,

    organization_id                            wt_scope_id not null,
    organization_name                          wh_dim_text,
    organization_description                   wh_dim_text,

    -- row-versioning columns: the partial unique index below allows at most
    -- one 'Current' row per natural key; superseded rows are kept with an
    -- expiration time
    current_row_indicator                      wh_dim_text,
    row_effective_time                         wh_timestamp,
    row_expiration_time                        wh_timestamp
  );

  -- https://www.postgresql.org/docs/current/indexes-partial.html
  create unique index wh_credential_dim_current_constraint
    on wh_credential_dimension (credential_library_id, credential_store_id, target_id, credential_purpose)
    where current_row_indicator = 'Current';

  -- One part of a bridge table to associate the set of wh_credential_dimension with a fact table.
  -- The other part of the bridge is wh_credential_group_membership.
  create table wh_credential_group (
    -- random id generated using encode(digest(gen_random_bytes(16), 'sha256'), 'base64')
    -- this is done to prevent conflicts with rows in other clusters
    -- which enables warehouse data from multiple clusters to be loaded into a
    -- single database instance
    key wh_dim_key primary key default wh_dim_key()
  );

  -- The second part of the bridge table. The other part is wh_credential_group.
  create table wh_credential_group_membership (
    credential_group_key wh_dim_key not null
      references wh_credential_group (key)
      on delete restrict
      on update cascade,
    credential_key wh_dim_key not null
      references wh_credential_dimension (key)
      on delete restrict
      on update cascade
  );

  -- Add "no credentials" and "Unknown" group and dimension.
  -- When a session has no credentials, "no credentials" is used as the "None" value.
  -- "Unknown" is used for existing data prior to the credential_dimension existing.
  insert into wh_credential_group
    (key)
  values
    ('no credentials'),
    ('Unknown');

  insert into wh_credential_dimension (
    key,
    credential_purpose,
    credential_library_id, credential_library_type, credential_library_name, credential_library_description, credential_library_vault_path, credential_library_vault_http_method, credential_library_vault_http_request_body,
    credential_store_id, credential_store_type, credential_store_name, credential_store_description, credential_store_vault_namespace, credential_store_vault_address,
    target_id, target_type, target_name, target_description, target_default_port_number, target_session_max_seconds, target_session_connection_limit,
    project_id, project_name, project_description,
    organization_id, organization_name, organization_description,
    current_row_indicator, row_effective_time, row_expiration_time
  )
  values
    (
      -- note: the dimension row key is 'no credential' (singular); the
      -- group key above is 'no credentials' — they are linked by the
      -- membership insert at the end of this migration
      'no credential',
      'None',
      'None', 'None', 'None', 'None', 'None', 'None', 'None',
      'None', 'None', 'None', 'None', 'None', 'None',
      'None', 'None', 'None', 'None', -1, -1, -1,
      '00000000000', 'None', 'None',
      '00000000000', 'None', 'None',
      'Current', now(), 'infinity'::timestamptz
    ),
    (
      'Unknown',
      'Unknown',
      'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown',
      'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown',
      'Unknown', 'Unknown', 'Unknown', 'Unknown', -1, -1, -1,
      '00000000000', 'Unknown', 'Unknown',
      '00000000000', 'Unknown', 'Unknown',
      'Current', now(), 'infinity'::timestamptz
    );

  insert into wh_credential_group_membership
    (credential_group_key, credential_key)
  values
    ('no credentials', 'no credential'),
    ('Unknown', 'Unknown');

commit;

@ -0,0 +1,100 @@
begin;
-- The whx_credential_dimension_source and whx_credential_dimension_target views are used
-- by an insert trigger to determine if the current row for the dimension has
-- changed and a new one needs to be inserted. The first column in the target view
-- must be the current warehouse id and all remaining columns must match the columns
-- in the source view.
-- The whx_credential_dimension_source view assembles, from the operational
-- tables, the current value of every column tracked by the credential
-- dimension — one row per dynamic session credential.
create view whx_credential_dimension_source as
  select
    s.public_id                                as session_id,
    coalesce(scd.credential_purpose, 'None')   as credential_purpose,
    cl.public_id                               as credential_library_id,
    case
      when vcl is null then 'None'
      else 'vault credential library'
    end                                        as credential_library_type,
    coalesce(vcl.name, 'None')                 as credential_library_name,
    coalesce(vcl.description, 'None')          as credential_library_description,
    coalesce(vcl.vault_path, 'None')           as credential_library_vault_path,
    coalesce(vcl.http_method, 'None')          as credential_library_vault_http_method,
    coalesce(vcl.http_request_body, 'None')    as credential_library_vault_http_request_body,
    cs.public_id                               as credential_store_id,
    case
      when vcs is null then 'None'
      else 'vault credential store'
    end                                        as credential_store_type,
    coalesce(vcs.name, 'None')                 as credential_store_name,
    coalesce(vcs.description, 'None')          as credential_store_description,
    coalesce(vcs.namespace, 'None')            as credential_store_vault_namespace,
    coalesce(vcs.vault_address, 'None')        as credential_store_vault_address,
    t.public_id                                as target_id,
    'tcp target'                               as target_type,
    coalesce(tt.name, 'None')                  as target_name,
    coalesce(tt.description, 'None')           as target_description,
    coalesce(tt.default_port, 0)               as target_default_port_number,
    tt.session_max_seconds                     as target_session_max_seconds,
    tt.session_connection_limit                as target_session_connection_limit,
    p.public_id                                as project_id,
    coalesce(p.name, 'None')                   as project_name,
    coalesce(p.description, 'None')            as project_description,
    o.public_id                                as organization_id,
    coalesce(o.name, 'None')                   as organization_name,
    coalesce(o.description, 'None')            as organization_description
  from session_credential_dynamic as scd
  join session                 as s   on s.public_id   = scd.session_id
  join credential_library      as cl  on cl.public_id  = scd.library_id
  join credential_store        as cs  on cs.public_id  = cl.store_id
  join credential_vault_library as vcl on vcl.public_id = cl.public_id
  join credential_vault_store  as vcs on vcs.public_id = cs.public_id
  join target                  as t   on t.public_id   = s.target_id
  join target_tcp              as tt  on tt.public_id  = t.public_id
  join iam_scope               as p   on p.public_id   = t.scope_id  and p.type = 'project'
  join iam_scope               as o   on o.public_id   = p.parent_id and o.type = 'org';
-- The whx_credential_dimension_target view exposes the warehouse's current
-- dimension rows. Per the note at the top of this file, its first column is
-- the warehouse key and the remaining columns mirror, in order, those of
-- whx_credential_dimension_source.
create view whx_credential_dimension_target as
  select key,
         credential_purpose,
         credential_library_id,
         credential_library_type,
         credential_library_name,
         credential_library_description,
         credential_library_vault_path,
         credential_library_vault_http_method,
         credential_library_vault_http_request_body,
         credential_store_id,
         credential_store_type,
         credential_store_name,
         credential_store_description,
         credential_store_vault_namespace,
         credential_store_vault_address,
         target_id,
         target_type,
         target_name,
         target_description,
         target_default_port_number,
         target_session_max_seconds,
         target_session_connection_limit,
         project_id,
         project_name,
         project_description,
         organization_id,
         organization_name,
         organization_description
    from wh_credential_dimension
   where current_row_indicator = 'Current'
;
commit;

@ -0,0 +1,170 @@
begin;
  -- wh_upsert_credential_dimension compares the current values in the wh_credential_dimension
  -- with the current values in the operational tables for the given parameters. If the values
  -- between the operational tables and the wh_credential_dimension differ, a new row is inserted in
  -- the wh_credential_dimension to match the current values in the operational tables.
  -- Returns the key of the (possibly newly inserted) current dimension row.
  create function wh_upsert_credential_dimension(p_session_id wt_public_id, p_library_id wt_public_id, p_credential_purpose wh_dim_text)
    returns wh_dim_key
  as $$
  declare
    src     whx_credential_dimension_target%rowtype;
    target  whx_credential_dimension_target%rowtype;
    new_row wh_credential_dimension%rowtype;
    t_id    wt_public_id;
  begin
    -- resolve the session's target; 'strict' raises if the session is missing
    select s.target_id into strict t_id
      from session as s
     where s.public_id = p_session_id;

    -- the current dimension row, if any, for this library/target/purpose
    select * into target
      from whx_credential_dimension_target as t
     where t.credential_library_id = p_library_id
       and t.target_id             = t_id
       and t.credential_purpose    = p_credential_purpose;

    -- the operational values, shaped like the target row (re-using
    -- target.key) so the two rows can be compared with 'is distinct from'
    select
      target.key, t.credential_purpose,
      t.credential_library_id, t.credential_library_type, t.credential_library_name, t.credential_library_description, t.credential_library_vault_path, t.credential_library_vault_http_method, t.credential_library_vault_http_request_body,
      t.credential_store_id, t.credential_store_type, t.credential_store_name, t.credential_store_description, t.credential_store_vault_namespace, t.credential_store_vault_address,
      t.target_id, t.target_type, t.target_name, t.target_description, t.target_default_port_number, t.target_session_max_seconds, t.target_session_connection_limit,
      t.project_id, t.project_name, t.project_description,
      t.organization_id, t.organization_name, t.organization_description
      into src
      from whx_credential_dimension_source as t
     where t.credential_library_id = p_library_id
       and t.session_id            = p_session_id
       and t.target_id             = t_id
       and t.credential_purpose    = p_credential_purpose;

    if src is distinct from target then
      -- values changed (or no current row exists): expire the old current row...
      update wh_credential_dimension
         set current_row_indicator = 'Expired',
             row_expiration_time   = current_timestamp
       where credential_library_id = p_library_id
         and target_id             = t_id
         and credential_purpose    = p_credential_purpose
         and current_row_indicator = 'Current';

      -- ...and insert a fresh 'Current' row from the operational values
      insert into wh_credential_dimension (
        credential_purpose,
        credential_library_id, credential_library_type, credential_library_name, credential_library_description, credential_library_vault_path, credential_library_vault_http_method, credential_library_vault_http_request_body,
        credential_store_id, credential_store_type, credential_store_name, credential_store_description, credential_store_vault_namespace, credential_store_vault_address,
        target_id, target_type, target_name, target_description, target_default_port_number, target_session_max_seconds, target_session_connection_limit,
        project_id, project_name, project_description,
        organization_id, organization_name, organization_description,
        current_row_indicator, row_effective_time, row_expiration_time
      )
      select credential_purpose,
             credential_library_id, credential_library_type, credential_library_name, credential_library_description, credential_library_vault_path, credential_library_vault_http_method, credential_library_vault_http_request_body,
             credential_store_id, credential_store_type, credential_store_name, credential_store_description, credential_store_vault_namespace, credential_store_vault_address,
             target_id, target_type, target_name, target_description, target_default_port_number, target_session_max_seconds, target_session_connection_limit,
             project_id, project_name, project_description,
             organization_id, organization_name, organization_description,
             'Current', current_timestamp, 'infinity'::timestamptz
        from whx_credential_dimension_source
       where credential_library_id = p_library_id
         and session_id            = p_session_id
         and target_id             = t_id
         and credential_purpose    = p_credential_purpose
      returning * into new_row;

      return new_row.key;
    end if;

    -- no change: keep and return the existing current row's key
    return target.key;
  end
  $$ language plpgsql;
-- Run wh_upsert_credential_dimension for session_credential_dynamic row that is inserted.
-- Row-level trigger function: keeps wh_credential_dimension current for the
-- (session, library, purpose) triple of each inserted session_credential_dynamic row.
create function wh_insert_session_credential_dynamic()
returns trigger
as $$
begin
perform wh_upsert_credential_dimension(new.session_id, new.library_id, new.credential_purpose);
-- The return value of an AFTER trigger is ignored; null is conventional.
return null;
end;
$$ language plpgsql;
create trigger wh_insert_session_credential_dynamic
after insert on session_credential_dynamic
for each row
execute function wh_insert_session_credential_dynamic();
-- wh_upsert_credentail_group ("credentail" spelling retained because the
-- trigger below references this exact name) determines if a new
-- wh_credential_group needs to be created due to changes to the corresponding
-- wh_credential_dimensions. It then updates the wh_session_accumulating_fact
-- to associate it with the correct wh_credential_group.
create function wh_upsert_credentail_group()
  returns trigger
as $$
declare
  cg_key wh_dim_key;
  t_id   wt_public_id;
  s_id   wt_public_id;
  c_key  wh_dim_key;
begin
  -- All rows inserted by the triggering statement must belong to a single
  -- session and a single target; 'into strict' raises if that is violated.
  select distinct scd.session_id into strict s_id
    from new_table as scd;

  select distinct s.target_id into strict t_id
    from new_table as scd
    left join session as s on s.public_id = scd.session_id;

  -- based on query written by Michele Gaffney
  -- Look for an existing credential group whose membership is exactly the
  -- current set of credential dimensions for this target and these libraries.
  with
  credential_list (key) as (
    select key
      from wh_credential_dimension
     where target_id = t_id
       and credential_library_id in (select credential_library_id from new_table)
  )
  select distinct credential_group_key into cg_key
    from wh_credential_group_membership a
   where a.credential_key in (select key from credential_list)
     and (select count(key) from credential_list) =
         (
           select count(b.credential_key)
             from wh_credential_group_membership b
            where a.credential_key = b.credential_key
              and b.credential_key in (select key from credential_list)
         )
     and not exists
         (
           select 1
             from wh_credential_group_membership b
            where a.credential_key = b.credential_key
              and b.credential_key not in (select key from credential_list)
         );

  -- No exact match: create a new group and add every current credential
  -- dimension for this target/library set as a member.
  if cg_key is null then
    insert into wh_credential_group default values returning key into cg_key;
    for c_key in
      select key
        from wh_credential_dimension
       where target_id = t_id
         and credential_library_id in (select credential_library_id from new_table)
    loop
      insert into wh_credential_group_membership
        (credential_group_key, credential_key)
      values
        (cg_key, c_key);
    end loop;
  end if;

  -- Fix: update wh_session_accumulating_fact, not
  -- wh_session_connection_accumulating_fact. wh_insert_session creates the
  -- session fact row with the 'no credentials' placeholder that this function
  -- is documented to replace, and connection facts copy the key from the
  -- session fact at connection-insert time. When this statement trigger fires
  -- no connection rows exist yet, so the previous update affected zero rows.
  update wh_session_accumulating_fact
     set credential_group_key = cg_key
   where session_id = s_id;

  return null;
end;
$$ language plpgsql;

-- Run wh_upsert_credentail_group on statement. This assumes that all relevant
-- session_credential_dynamic rows are inserted as a single statement and that
-- the wh_insert_session_credential_dynamic trigger ran for each row and updated
-- the wh_credential_dimensions. Then this statement trigger can run to update the
-- bridge tables and wh_session_accumulating_fact.
create trigger wh_insert_stmt_session_credential_dynamic
  after insert on session_credential_dynamic
  referencing new table as new_table
  for each statement
  execute function wh_upsert_credentail_group();
commit;

@ -0,0 +1,54 @@
begin;
-- Associate each session fact with a credential group. The 'Unknown' default
-- backfills rows created before this column existed; it is dropped
-- immediately below so all new inserts must supply an explicit key.
alter table wh_session_accumulating_fact
add column credential_group_key wh_dim_key not null
default 'Unknown'
references wh_credential_group (key)
on delete restrict
on update cascade;
alter table wh_session_accumulating_fact
alter column credential_group_key drop default;
-- replaces function from 15/01_wh_rename_key_columns.up.sql
-- The trigger must be dropped before the function it executes.
drop trigger wh_insert_session on session;
drop function wh_insert_session;
-- wh_insert_session inserts a wh_session_accumulating_fact row for each new
-- session, resolving the host and user dimension keys and stamping the time
-- the session entered the 'pending' state.
create function wh_insert_session()
returns trigger
as $$
declare
new_row wh_session_accumulating_fact%rowtype;
begin
-- Single-row CTE: date/time dimension keys for the session's 'pending' state.
with
pending_timestamp (date_dim_key, time_dim_key, ts) as (
select wh_date_key(start_time), wh_time_key(start_time), start_time
from session_state
where session_id = new.public_id
and state = 'pending'
)
insert into wh_session_accumulating_fact (
session_id,
auth_token_id,
host_key,
user_key,
credential_group_key,
session_pending_date_key,
session_pending_time_key,
session_pending_time
)
select new.public_id,
new.auth_token_id,
wh_upsert_host(new.host_id, new.host_set_id, new.target_id),
wh_upsert_user(new.user_id, new.auth_token_id),
'no credentials', -- will be updated by wh_upsert_credentail_group
pending_timestamp.date_dim_key,
pending_timestamp.time_dim_key,
pending_timestamp.ts
from pending_timestamp
-- 'strict' raises if the insert produced no row (no 'pending' state found).
returning * into strict new_row;
return null;
end;
$$ language plpgsql;
create trigger wh_insert_session
after insert on session
for each row
execute function wh_insert_session();
commit;

@ -0,0 +1,75 @@
begin;
-- Associate each connection fact with a credential group. The 'Unknown'
-- default backfills rows created before this column existed; it is dropped
-- immediately below so all new inserts must supply an explicit key.
alter table wh_session_connection_accumulating_fact
add column credential_group_key wh_dim_key not null
default 'Unknown'
references wh_credential_group (key)
on delete restrict
on update cascade;
alter table wh_session_connection_accumulating_fact
alter column credential_group_key drop default;
-- replaces function from 15/01_wh_rename_key_columns.up.sql
-- The trigger must be dropped before the function it executes.
drop trigger wh_insert_session_connection on session_connection;
drop function wh_insert_session_connection;
-- wh_insert_session_connection inserts a row into
-- wh_session_connection_accumulating_fact for each new session connection,
-- copying the host/user/credential-group dimension keys from the parent
-- session's fact row and stamping the time the connection was 'authorized'.
create function wh_insert_session_connection()
returns trigger
as $$
declare
new_row wh_session_connection_accumulating_fact%rowtype;
begin
with
-- Single-row CTE: date/time keys for the connection's 'authorized' state.
authorized_timestamp (date_dim_key, time_dim_key, ts) as (
select wh_date_key(start_time), wh_time_key(start_time), start_time
from session_connection_state
where connection_id = new.public_id
and state = 'authorized'
),
-- Single-row CTE: dimension keys already resolved on the session fact row.
session_dimension (host_dim_key, user_dim_key, credential_group_dim_key) as (
select host_key, user_key, credential_group_key
from wh_session_accumulating_fact
where session_id = new.session_id
)
insert into wh_session_connection_accumulating_fact (
connection_id,
session_id,
host_key,
user_key,
credential_group_key,
connection_authorized_date_key,
connection_authorized_time_key,
connection_authorized_time,
client_tcp_address,
client_tcp_port_number,
endpoint_tcp_address,
endpoint_tcp_port_number,
bytes_up,
bytes_down
)
-- Cross join of the two single-row CTEs yields the one row to insert.
select new.public_id,
new.session_id,
session_dimension.host_dim_key,
session_dimension.user_dim_key,
session_dimension.credential_group_dim_key,
authorized_timestamp.date_dim_key,
authorized_timestamp.time_dim_key,
authorized_timestamp.ts,
new.client_tcp_address,
new.client_tcp_port,
new.endpoint_tcp_address,
new.endpoint_tcp_port,
new.bytes_up,
new.bytes_down
from authorized_timestamp,
session_dimension
-- 'strict' raises if either CTE was empty and no row was inserted.
returning * into strict new_row;
-- NOTE(review): wh_rollup_connections is defined elsewhere; presumably it
-- aggregates connection stats onto the session fact -- confirm.
perform wh_rollup_connections(new.session_id);
return null;
end;
$$ language plpgsql;
create trigger wh_insert_session_connection
after insert on session_connection
for each row
execute function wh_insert_session_connection();
commit;

@ -0,0 +1,189 @@
package migration
import (
"context"
"database/sql"
"testing"
"github.com/hashicorp/boundary/internal/auth/oidc"
"github.com/hashicorp/boundary/internal/authtoken"
"github.com/hashicorp/boundary/internal/credential"
"github.com/hashicorp/boundary/internal/credential/vault"
"github.com/hashicorp/boundary/internal/db"
"github.com/hashicorp/boundary/internal/db/schema"
"github.com/hashicorp/boundary/internal/host/static"
"github.com/hashicorp/boundary/internal/iam"
"github.com/hashicorp/boundary/internal/kms"
"github.com/hashicorp/boundary/internal/session"
"github.com/hashicorp/boundary/internal/target"
"github.com/hashicorp/boundary/testing/dbtest"
wrapping "github.com/hashicorp/go-kms-wrapping"
"github.com/jinzhu/gorm"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestMigrations_CredentialDimension rolls a fresh database forward to the
// schema version just before the credential-dimension migrations, seeds it
// with two terminated sessions (one created without dynamic credentials and
// one with vault dynamic credentials), then applies the migrations under test
// and verifies the schema manager reports the expected version with a clean
// (non-dirty) state.
func TestMigrations_CredentialDimension(t *testing.T) {
const (
// priorMigration is the schema version the test data is seeded against.
priorMigration = 15002
// currentMigration is the version the migrations under test end at.
currentMigration = 16005
)
t.Parallel()
assert, require := assert.New(t), require.New(t)
ctx := context.Background()
dialect := dbtest.Postgres
// Disposable database created from a template; c tears it down on cleanup.
c, u, _, err := dbtest.StartUsingTemplate(dialect, dbtest.WithTemplate(dbtest.Template1))
require.NoError(err)
t.Cleanup(func() {
require.NoError(c())
})
d, err := sql.Open(dialect, u)
require.NoError(err)
// migration to the prior migration (before the one we want to test)
oState := schema.TestCloneMigrationStates(t)
nState := schema.TestCreatePartialMigrationState(oState["postgres"], priorMigration)
oState["postgres"] = nState
m, err := schema.NewManager(ctx, dialect, d, schema.WithMigrationStates(oState))
require.NoError(err)
assert.NoError(m.RollForward(ctx))
state, err := m.CurrentState(ctx)
require.NoError(err)
assert.Equal(priorMigration, state.DatabaseSchemaVersion)
assert.False(state.Dirty)
// okay, now we can seed the database with test data
conn, err := gorm.Open(dialect, u)
require.NoError(err)
rw := db.New(conn)
wrapper := db.TestWrapper(t)
org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper))
require.NotNil(prj)
assert.NotEmpty(prj.GetPublicId())
// A static host catalog/set/host and a tcp target for the sessions to use.
hc := static.TestCatalogs(t, conn, prj.GetPublicId(), 1)[0]
hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0]
h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0]
static.TestSetMembers(t, conn, hs.GetPublicId(), []*static.Host{h})
tar := target.TestTcpTarget(t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()}))
var sessions []*session.Session
kmsCache := kms.TestKms(t, conn, wrapper)
databaseWrapper, err := kmsCache.GetWrapper(ctx, org.GetPublicId(), kms.KeyPurposeDatabase)
require.NoError(err)
// Session seeded without dynamic credentials.
{
at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId())
uId := at.GetIamUserId()
sess := session.TestSession(t, conn, wrapper, session.ComposedOf{
UserId: uId,
HostId: h.GetPublicId(),
TargetId: tar.GetPublicId(),
HostSetId: hs.GetPublicId(),
AuthTokenId: at.GetPublicId(),
ScopeId: prj.GetPublicId(),
Endpoint: "tcp://127.0.0.1:22",
})
sessions = append(sessions, sess)
}
// Session seeded with an OIDC auth token and dynamic vault credentials.
{
at := testOidcAuthToken(t, conn, kmsCache, databaseWrapper, org.GetPublicId())
uId := at.GetIamUserId()
creds := testSessionCredentialParams(t, conn, kmsCache, wrapper, tar)
sess := session.TestSession(t, conn, wrapper, session.ComposedOf{
UserId: uId,
HostId: h.GetPublicId(),
TargetId: tar.GetPublicId(),
HostSetId: hs.GetPublicId(),
AuthTokenId: at.GetPublicId(),
ScopeId: prj.GetPublicId(),
Endpoint: "tcp://127.0.0.1:22",
DynamicCredentials: creds,
})
sessions = append(sessions, sess)
}
sessionRepo, err := session.NewRepository(rw, rw, kmsCache)
require.NoError(err)
// Expect the bulk terminate to touch nothing before explicit termination.
count, err := sessionRepo.TerminateCompletedSessions(ctx)
assert.NoError(err)
assert.Zero(count)
for _, sess := range sessions {
// call TerminateSession
_, err = sessionRepo.TerminateSession(ctx, sess.GetPublicId(), 1, session.ClosedByUser)
assert.NoError(err)
}
// now we're ready for the migration we want to test.
oState = schema.TestCloneMigrationStates(t)
nState = schema.TestCreatePartialMigrationState(oState["postgres"], currentMigration)
oState["postgres"] = nState
m, err = schema.NewManager(ctx, dialect, d, schema.WithMigrationStates(oState))
require.NoError(err)
assert.NoError(m.RollForward(ctx))
state, err = m.CurrentState(ctx)
require.NoError(err)
assert.Equal(currentMigration, state.DatabaseSchemaVersion)
assert.False(state.Dirty)
}
// testOidcAuthToken creates an OIDC auth method and account in the given
// scope, associates a new IAM user with the account, and returns a freshly
// issued auth token for that user.
func testOidcAuthToken(t *testing.T, conn *gorm.DB, kms *kms.Kms, wrapper wrapping.Wrapper, scopeId string) *authtoken.AuthToken {
	t.Helper()
	ctx := context.Background()
	rw := db.New(conn)

	am := oidc.TestAuthMethod(
		t, conn, wrapper, scopeId, oidc.ActivePrivateState,
		"alice-rp", "fido",
		oidc.WithIssuer(oidc.TestConvertToUrls(t, "https://www.alice.com")[0]),
		oidc.WithSigningAlgs(oidc.RS256),
		oidc.WithApiUrl(oidc.TestConvertToUrls(t, "https://www.alice.com/callback")[0]),
	)
	account := oidc.TestAccount(t, conn, am, "test-subject")

	iamRepo, err := iam.NewRepository(rw, rw, kms)
	require.NoError(t, err)
	user := iam.TestUser(t, iamRepo, scopeId, iam.WithAccountIds(account.PublicId))

	tokenRepo, err := authtoken.NewRepository(rw, rw, kms)
	require.NoError(t, err)
	token, err := tokenRepo.CreateAuthToken(ctx, user, account.GetPublicId())
	require.NoError(t, err)
	return token
}
// testSessionCredentialParams creates a vault credential store with two
// libraries, attaches both libraries to the target, and returns session
// dynamic-credential values referencing them: the first library for both the
// application and ingress purposes, the second for the egress purpose.
func testSessionCredentialParams(t *testing.T, conn *gorm.DB, kms *kms.Kms, wrapper wrapping.Wrapper, tar *target.TcpTarget) []*session.DynamicCredential {
	t.Helper()
	ctx := context.Background()
	rw := db.New(conn)

	stores := vault.TestCredentialStores(t, conn, wrapper, tar.ScopeId, 1)
	libs := vault.TestCredentialLibraries(t, conn, wrapper, stores[0].GetPublicId(), 2)

	repo, err := target.NewRepository(rw, rw, kms)
	require.NoError(t, err)
	_, _, _, err = repo.AddTargetCredentialSources(ctx, tar.GetPublicId(), tar.GetVersion(), []string{libs[0].PublicId, libs[1].PublicId})
	require.NoError(t, err)

	return []*session.DynamicCredential{
		session.NewDynamicCredential(libs[0].GetPublicId(), credential.ApplicationPurpose),
		session.NewDynamicCredential(libs[0].GetPublicId(), credential.IngressPurpose),
		session.NewDynamicCredential(libs[1].GetPublicId(), credential.EgressPurpose),
	}
}

@ -4,7 +4,7 @@ package schema
func init() {
migrationStates["postgres"] = migrationState{
binarySchemaVersion: 16004,
binarySchemaVersion: 16005,
upMigrations: map[int][]byte{
1: []byte(`
create domain wt_public_id as text
@ -7014,6 +7014,134 @@ alter table wh_host_dimension
insert into oplog_ticket (name, version)
values
('plugin', 1);
`),
16001: []byte(`
-- replaces check from internal/db/schema/migrations/postgres/0/60_wh_domain_types.up.sql
alter domain wh_public_id drop constraint wh_public_id_check;
alter domain wh_public_id add constraint wh_public_id_check
check(
value = 'None'
or
value = 'Unknown'
or
length(trim(value)) > 10
);
create table wh_credential_dimension (
-- random id generated using encode(digest(gen_random_bytes(16), 'sha256'), 'base64')
-- this is done to prevent conflicts with rows in other clusters
-- which enables warehouse data from multiple clusters to be loaded into a
-- single database instance
key wh_dim_key primary key default wh_dim_key(),
credential_purpose wh_dim_text,
credential_library_id wh_public_id not null,
credential_library_type wh_dim_text,
credential_library_name wh_dim_text,
credential_library_description wh_dim_text,
credential_library_vault_path wh_dim_text,
credential_library_vault_http_method wh_dim_text,
credential_library_vault_http_request_body wh_dim_text,
credential_store_id wh_public_id not null,
credential_store_type wh_dim_text,
credential_store_name wh_dim_text,
credential_store_description wh_dim_text,
credential_store_vault_namespace wh_dim_text,
credential_store_vault_address wh_dim_text,
target_id wh_public_id not null,
target_type wh_dim_text,
target_name wh_dim_text,
target_description wh_dim_text,
target_default_port_number integer not null,
target_session_max_seconds integer not null,
target_session_connection_limit integer not null,
project_id wt_scope_id not null,
project_name wh_dim_text,
project_description wh_dim_text,
organization_id wt_scope_id not null,
organization_name wh_dim_text,
organization_description wh_dim_text,
current_row_indicator wh_dim_text,
row_effective_time wh_timestamp,
row_expiration_time wh_timestamp
);
-- https://www.postgresql.org/docs/current/indexes-partial.html
create unique index wh_credential_dim_current_constraint
on wh_credential_dimension (credential_library_id, credential_store_id, target_id, credential_purpose)
where current_row_indicator = 'Current';
-- One part of a bridge table to associated the set of wh_credential_dimension with a fact table.
-- The other part of the bridge is wh_credential_group_membership.
create table wh_credential_group (
-- random id generated using encode(digest(gen_random_bytes(16), 'sha256'), 'base64')
-- this is done to prevent conflicts with rows in other clusters
-- which enables warehouse data from multiple clusters to be loaded into a
-- single database instance
key wh_dim_key primary key default wh_dim_key()
);
-- The second part of the bridge table. The other part is wh_credential_group.
create table wh_credential_group_membership (
credential_group_key wh_dim_key not null
references wh_credential_group (key)
on delete restrict
on update cascade,
credential_key wh_dim_key not null
references wh_credential_dimension (key)
on delete restrict
on update cascade
);
-- Add "no credentials" and "Unknown" group an dimension.
-- When a session has no credentials "no credentials" is used as the "None" value.
-- "Unknown" is used for existing data prior to the credential_dimension existing.
insert into wh_credential_group
(key)
values
('no credentials'),
('Unknown');
insert into wh_credential_dimension (
key,
credential_purpose,
credential_library_id, credential_library_type, credential_library_name, credential_library_description, credential_library_vault_path, credential_library_vault_http_method, credential_library_vault_http_request_body,
credential_store_id, credential_store_type, credential_store_name, credential_store_description, credential_store_vault_namespace, credential_store_vault_address,
target_id, target_type, target_name, target_description, target_default_port_number, target_session_max_seconds, target_session_connection_limit,
project_id, project_name, project_description,
organization_id, organization_name, organization_description,
current_row_indicator, row_effective_time, row_expiration_time
)
values
(
'no credential',
'None',
'None', 'None', 'None', 'None', 'None', 'None', 'None',
'None', 'None', 'None', 'None', 'None', 'None',
'None', 'None', 'None', 'None', -1, -1, -1,
'00000000000', 'None', 'None',
'00000000000', 'None', 'None',
'Current', now(), 'infinity'::timestamptz
),
(
'Unknown',
'Unknown',
'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown',
'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown', 'Unknown',
'Unknown', 'Unknown', 'Unknown', 'Unknown', -1, -1, -1,
'00000000000', 'Unknown', 'Unknown',
'00000000000', 'Unknown', 'Unknown',
'Current', now(), 'infinity'::timestamptz
);
insert into wh_credential_group_membership
(credential_group_key, credential_key)
values
('no credentials', 'no credential'),
('Unknown', 'Unknown');
`),
16002: []byte(`
/*
@ -7101,6 +7229,106 @@ alter table wh_host_dimension
insert into oplog_ticket (name, version)
values
('plugin_host', 1);
`),
16002: []byte(`
-- The whx_credential_dimension_source and whx_credential_dimension_target views are used
-- by an insert trigger to determine if the current row for the dimension has
-- changed and a new one needs to be inserted. The first column in the target view
-- must be the current warehouse id and all remaining columns must match the columns
-- in the source view.
-- The whx_credential_dimension_source view shows the current values in the
-- operational tables of the credential dimension.
create view whx_credential_dimension_source as
select -- id is the first column in the target view
s.public_id as session_id,
coalesce(scd.credential_purpose, 'None') as credential_purpose,
cl.public_id as credential_library_id,
case
when vcl is null then 'None'
else 'vault credential library'
end as credential_library_type,
coalesce(vcl.name, 'None') as credential_library_name,
coalesce(vcl.description, 'None') as credential_library_description,
coalesce(vcl.vault_path, 'None') as credential_library_vault_path,
coalesce(vcl.http_method, 'None') as credential_library_vault_http_method,
coalesce(vcl.http_request_body, 'None') as credential_library_vault_http_request_body,
cs.public_id as credential_store_id,
case
when vcs is null then 'None'
else 'vault credential store'
end as credential_store_type,
coalesce(vcs.name, 'None') as credential_store_name,
coalesce(vcs.description, 'None') as credential_store_description,
coalesce(vcs.namespace, 'None') as credential_store_vault_namespace,
coalesce(vcs.vault_address, 'None') as credential_store_vault_address,
t.public_id as target_id,
'tcp target' as target_type,
coalesce(tt.name, 'None') as target_name,
coalesce(tt.description, 'None') as target_description,
coalesce(tt.default_port, 0) as target_default_port_number,
tt.session_max_seconds as target_session_max_seconds,
tt.session_connection_limit as target_session_connection_limit,
p.public_id as project_id,
coalesce(p.name, 'None') as project_name,
coalesce(p.description, 'None') as project_description,
o.public_id as organization_id,
coalesce(o.name, 'None') as organization_name,
coalesce(o.description, 'None') as organization_description
from session_credential_dynamic as scd,
session as s,
credential_library as cl,
credential_store as cs,
credential_vault_library as vcl,
credential_vault_store as vcs,
target as t,
target_tcp as tt,
iam_scope as p,
iam_scope as o
where scd.library_id = cl.public_id
and cl.store_id = cs.public_id
and vcl.public_id = cl.public_id
and vcs.public_id = cs.public_id
and s.public_id = scd.session_id
and s.target_id = t.public_id
and t.public_id = tt.public_id
and p.public_id = t.scope_id
and p.type = 'project'
and o.public_id = p.parent_id
and o.type = 'org';
create view whx_credential_dimension_target as
select key,
credential_purpose,
credential_library_id,
credential_library_type,
credential_library_name,
credential_library_description,
credential_library_vault_path,
credential_library_vault_http_method,
credential_library_vault_http_request_body,
credential_store_id,
credential_store_type,
credential_store_name,
credential_store_description,
credential_store_vault_namespace,
credential_store_vault_address,
target_id,
target_type,
target_name,
target_description,
target_default_port_number,
target_session_max_seconds,
target_session_connection_limit,
project_id,
project_name,
project_description,
organization_id,
organization_name,
organization_description
from wh_credential_dimension
where current_row_indicator = 'Current'
;
`),
16003: []byte(`
-- We are adding the name to the base host catalog type. This allows the db
@ -7160,6 +7388,176 @@ alter table wh_host_dimension
create trigger update_host_catalog_subtype before update on static_host_catalog
for each row execute procedure update_host_catalog_subtype();
`),
16003: []byte(`
-- wh_upsert_credential_dimension compares the current values in the wh_credential_dimension
-- with the current values in the operational tables for the given parameters. If the values
-- between operational tables and the wh_credential_dimension differ, a new row is inserted in
-- the wh_credential_dimension to match the current values in the operational tables.
create function wh_upsert_credential_dimension(p_session_id wt_public_id, p_library_id wt_public_id, p_credential_purpose wh_dim_text)
returns wh_dim_key
as $$
declare
src whx_credential_dimension_target%rowtype;
target whx_credential_dimension_target%rowtype;
new_row wh_credential_dimension%rowtype;
t_id wt_public_id;
begin
select s.target_id into strict t_id
from session as s
where s.public_id = p_session_id;
select * into target
from whx_credential_dimension_target as t
where t.credential_library_id = p_library_id
and t.target_id = t_id
and t.credential_purpose = p_credential_purpose;
select
target.key, t.credential_purpose,
t.credential_library_id, t.credential_library_type, t.credential_library_name, t.credential_library_description, t.credential_library_vault_path, t.credential_library_vault_http_method, t.credential_library_vault_http_request_body,
t.credential_store_id, t.credential_store_type, t.credential_store_name, t.credential_store_description, t.credential_store_vault_namespace, t.credential_store_vault_address,
t.target_id, t.target_type, t.target_name, t.target_description, t.target_default_port_number, t.target_session_max_seconds, t.target_session_connection_limit,
t.project_id, t.project_name, t.project_description,
t.organization_id, t.organization_name, t.organization_description
into src
from whx_credential_dimension_source as t
where t.credential_library_id = p_library_id
and t.session_id = p_session_id
and t.target_id = t_id
and t.credential_purpose = p_credential_purpose;
if src is distinct from target then
update wh_credential_dimension
set current_row_indicator = 'Expired',
row_expiration_time = current_timestamp
where credential_library_id = p_library_id
and target_id = t_id
and credential_purpose = p_credential_purpose
and current_row_indicator = 'Current';
insert into wh_credential_dimension (
credential_purpose,
credential_library_id, credential_library_type, credential_library_name, credential_library_description, credential_library_vault_path, credential_library_vault_http_method, credential_library_vault_http_request_body,
credential_store_id, credential_store_type, credential_store_name, credential_store_description, credential_store_vault_namespace, credential_store_vault_address,
target_id, target_type, target_name, target_description, target_default_port_number, target_session_max_seconds, target_session_connection_limit,
project_id, project_name, project_description,
organization_id, organization_name, organization_description,
current_row_indicator, row_effective_time, row_expiration_time
)
select credential_purpose,
credential_library_id, credential_library_type, credential_library_name, credential_library_description, credential_library_vault_path, credential_library_vault_http_method, credential_library_vault_http_request_body,
credential_store_id, credential_store_type, credential_store_name, credential_store_description, credential_store_vault_namespace, credential_store_vault_address,
target_id, target_type, target_name, target_description, target_default_port_number, target_session_max_seconds, target_session_connection_limit,
project_id, project_name, project_description,
organization_id, organization_name, organization_description,
'Current', current_timestamp, 'infinity'::timestamptz
from whx_credential_dimension_source
where credential_library_id = p_library_id
and session_id = p_session_id
and target_id = t_id
and credential_purpose = p_credential_purpose
returning * into new_row;
return new_row.key;
end if;
return target.key;
end
$$ language plpgsql;
-- Run wh_upsert_credential_dimension for session_credential_dynamic row that is inserted.
create function wh_insert_session_credential_dynamic()
returns trigger
as $$
begin
perform wh_upsert_credential_dimension(new.session_id, new.library_id, new.credential_purpose);
return null;
end;
$$ language plpgsql;
create trigger wh_insert_session_credential_dynamic
after insert on session_credential_dynamic
for each row
execute function wh_insert_session_credential_dynamic();
-- wh_upsert_credentail_group determines if a new wh_credential_group needs to be
-- created due to changes to the corresponding wh_credential_dimensions. It then
-- updates the wh_session_accumulating_fact to associate it with the correct wh_credential_group.
create function wh_upsert_credentail_group()
returns trigger
as $$
declare
cg_key wh_dim_key;
t_id wt_public_id;
s_id wt_public_id;
c_key wh_dim_key;
begin
select distinct scd.session_id into strict s_id
from new_table as scd;
select distinct s.target_id into strict t_id
from new_table as scd
left join session as s on s.public_id = scd.session_id;
-- based on query written by Michele Gaffney
with
credential_list (key) as (
select key
from wh_credential_dimension
where target_id = t_id
and credential_library_id in (select credential_library_id from new_table)
)
select distinct credential_group_key into cg_key
from wh_credential_group_membership a
where a.credential_key in (select key from credential_list)
and (select count(key) from credential_list) =
(
select count(b.credential_key)
from wh_credential_group_membership b
where a.credential_key = b.credential_key
and b.credential_key in (select key from credential_list)
)
and not exists
(
select 1
from wh_credential_group_membership b
where a.credential_key = b.credential_key
and b.credential_key not in (select key from credential_list)
)
;
if cg_key is null then
insert into wh_credential_group default values returning key into cg_key;
for c_key in
select key
from wh_credential_dimension
where target_id = t_id
and credential_library_id in (select credential_library_id from new_table)
loop
insert into wh_credential_group_membership
(credential_group_key, credential_key)
values
(cg_key, c_key);
end loop;
end if;
-- Fix: update the session fact (created by wh_insert_session with the
-- 'no credentials' placeholder), not the connection fact, which has no rows
-- for the session yet and copies this key from the session fact on insert.
update wh_session_accumulating_fact
set credential_group_key = cg_key
where session_id = s_id;
return null;
end;
$$ language plpgsql;
-- Run wh_upsert_credentail_group on statement. This assumes that all relevant
-- session_credential_dynamic rows are inserted as a single statement and that
-- the wh_insert_session_credential_dynamic trigger ran for each row and updated
-- the wh_credential_dimensions. Then this statement trigger can run to update the
-- bridge tables and wh_session_accumulating_fact.
create trigger wh_insert_stmt_session_credential_dynamic
after insert on session_credential_dynamic
referencing new table as new_table
for each statement
execute function wh_upsert_credentail_group();
`),
16004: []byte(`
/*
@ -7325,6 +7723,134 @@ alter table wh_host_dimension
('host_plugin_catalog', 1),
('host_plugin_catalog_secret', 1),
('host_plugin_set', 1);
`),
16004: []byte(`
alter table wh_session_accumulating_fact
add column credential_group_key wh_dim_key not null
default 'Unknown'
references wh_credential_group (key)
on delete restrict
on update cascade;
alter table wh_session_accumulating_fact
alter column credential_group_key drop default;
-- replaces function from 15/01_wh_rename_key_columns.up.sql
drop trigger wh_insert_session on session;
drop function wh_insert_session;
create function wh_insert_session()
returns trigger
as $$
declare
new_row wh_session_accumulating_fact%rowtype;
begin
with
pending_timestamp (date_dim_key, time_dim_key, ts) as (
select wh_date_key(start_time), wh_time_key(start_time), start_time
from session_state
where session_id = new.public_id
and state = 'pending'
)
insert into wh_session_accumulating_fact (
session_id,
auth_token_id,
host_key,
user_key,
credential_group_key,
session_pending_date_key,
session_pending_time_key,
session_pending_time
)
select new.public_id,
new.auth_token_id,
wh_upsert_host(new.host_id, new.host_set_id, new.target_id),
wh_upsert_user(new.user_id, new.auth_token_id),
'no credentials', -- will be updated by wh_upsert_credentail_group
pending_timestamp.date_dim_key,
pending_timestamp.time_dim_key,
pending_timestamp.ts
from pending_timestamp
returning * into strict new_row;
return null;
end;
$$ language plpgsql;
create trigger wh_insert_session
after insert on session
for each row
execute function wh_insert_session();
`),
16005: []byte(`
alter table wh_session_connection_accumulating_fact
add column credential_group_key wh_dim_key not null
default 'Unknown'
references wh_credential_group (key)
on delete restrict
on update cascade;
alter table wh_session_connection_accumulating_fact
alter column credential_group_key drop default;
-- replaces function from 15/01_wh_rename_key_columns.up.sql
drop trigger wh_insert_session_connection on session_connection;
drop function wh_insert_session_connection;
create function wh_insert_session_connection()
returns trigger
as $$
declare
new_row wh_session_connection_accumulating_fact%rowtype;
begin
with
authorized_timestamp (date_dim_key, time_dim_key, ts) as (
select wh_date_key(start_time), wh_time_key(start_time), start_time
from session_connection_state
where connection_id = new.public_id
and state = 'authorized'
),
session_dimension (host_dim_key, user_dim_key, credential_group_dim_key) as (
select host_key, user_key, credential_group_key
from wh_session_accumulating_fact
where session_id = new.session_id
)
insert into wh_session_connection_accumulating_fact (
connection_id,
session_id,
host_key,
user_key,
credential_group_key,
connection_authorized_date_key,
connection_authorized_time_key,
connection_authorized_time,
client_tcp_address,
client_tcp_port_number,
endpoint_tcp_address,
endpoint_tcp_port_number,
bytes_up,
bytes_down
)
select new.public_id,
new.session_id,
session_dimension.host_dim_key,
session_dimension.user_dim_key,
session_dimension.credential_group_dim_key,
authorized_timestamp.date_dim_key,
authorized_timestamp.time_dim_key,
authorized_timestamp.ts,
new.client_tcp_address,
new.client_tcp_port,
new.endpoint_tcp_address,
new.endpoint_tcp_port,
new.bytes_up,
new.bytes_down
from authorized_timestamp,
session_dimension
returning * into strict new_row;
perform wh_rollup_connections(new.session_id);
return null;
end;
$$ language plpgsql;
create trigger wh_insert_session_connection
after insert on session_connection
for each row
execute function wh_insert_session_connection();
`),
2001: []byte(`
-- log_migration entries represent logs generated during migrations

@ -175,10 +175,26 @@ begin;
('t_________cr', 's___1cr-sths'),
('t_________cr', 's___2cr-sths');
insert into credential_vault_store
(scope_id, public_id, name, description, vault_address, namespace)
values
('p____bcolors', 'vs_______cvs', 'color vault store', 'None', 'https://vault.color', 'blue');
insert into credential_vault_library
(store_id, public_id, name, description, vault_path, http_method)
values
('vs_______cvs', 'vl______cvl', 'color vault library', 'None', '/secrets', 'GET');
insert into target_credential_library
(target_id, credential_library_id, credential_purpose)
values
('t_________cb', 'vl______cvl', 'application');
insert into session
( scope_id , target_id , host_set_id , host_id , user_id , auth_token_id , certificate , endpoint , public_id)
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bcolors' , 't_________cb' , 's___1cb-sths' , 'h_____cb__01' , 'u______clare' , 'tok____clare' , 'abc'::bytea , 'ep1' , 's1_____clare');
('p____bcolors', 't_________cb', 's___1cb-sths', 'h_____cb__01', 'u______clare', 'tok____clare', 'abc'::bytea, 'ep1', 's1_____clare');
insert into session_connection
(session_id, public_id)

@ -261,5 +261,31 @@ begin;
end;
$$ language plpgsql;
create function _wtt_load_widgets_credentials()
returns void
as $$
begin
insert into credential_vault_store
(scope_id, public_id, name, description, vault_address, namespace)
values
('p____bwidget', 'vs_______wvs', 'widget vault store', 'None', 'https://vault.widget', 'default');
insert into credential_vault_library
(store_id, public_id, name, description, vault_path, http_method)
values
('vs_______wvs', 'vl______wvl1', 'widget vault library', 'None', '/secrets', 'GET'),
('vs_______wvs', 'vl______wvl2', 'widget vault ssh', 'None', '/secrets/ssh/admin', 'GET'),
('vs_______wvs', 'vl______wvl3', 'widget vault kv', 'None', '/secrets/kv', 'GET');
insert into target_credential_library
(target_id, credential_library_id, credential_purpose)
values
('t_________wb', 'vl______wvl1', 'application'),
('t_________wb', 'vl______wvl2', 'application'),
('t_________wb', 'vl______wvl3', 'application'),
('t_________wb', 'vl______wvl3', 'egress');
end;
$$ language plpgsql;
commit;

@ -0,0 +1,53 @@
begin;
select plan(2);
insert into wh_credential_dimension (
credential_purpose,
credential_library_id, credential_library_type, credential_library_name, credential_library_description, credential_library_vault_path, credential_library_vault_http_method, credential_library_vault_http_request_body,
credential_store_id, credential_store_type, credential_store_name, credential_store_description, credential_store_vault_namespace, credential_store_vault_address,
target_id, target_type, target_name, target_description, target_default_port_number, target_session_max_seconds, target_session_connection_limit,
project_id, project_name, project_description,
organization_id, organization_name, organization_description,
current_row_indicator, row_effective_time, row_expiration_time
) values (
'application',
'vl______wvl1', 'vault credential library', 'gidget vault library', 'None', '/secrets', 'GET', '\x4e6f6e65',
'vs_______wvs', 'vault credential store', 'widget vault store', 'None', 'default', 'https://vault.widget',
't_________wb', 'tcp target', 'Big Widget Target', 'None', 0, 28800, 1,
'p____bwidget', 'Big Widget Factory', 'None',
'o_____widget', 'Widget Inc', 'None',
'Current', current_timestamp, 'infinity'::timestamptz
);
update wh_credential_dimension
set current_row_indicator = 'Expired',
row_expiration_time = current_timestamp
where credential_library_id = 'vl______wvl1'
and credential_store_id = 'vs_______wvs'
and target_id = 't_________wb'
and credential_purpose = 'application'
and current_row_indicator = 'Current';
insert into wh_credential_dimension (
credential_purpose,
credential_library_id, credential_library_type, credential_library_name, credential_library_description, credential_library_vault_path, credential_library_vault_http_method, credential_library_vault_http_request_body,
credential_store_id, credential_store_type, credential_store_name, credential_store_description, credential_store_vault_namespace, credential_store_vault_address,
target_id, target_type, target_name, target_description, target_default_port_number, target_session_max_seconds, target_session_connection_limit,
project_id, project_name, project_description,
organization_id, organization_name, organization_description,
current_row_indicator, row_effective_time, row_expiration_time
) values (
'application',
'vl______wvl1', 'vault credential library', 'gidget vault library', 'None', '/secrets', 'GET', '\x4e6f6e65',
'vs_______wvs', 'vault credential store', 'widget vault store', 'None', 'default', 'https://vault.widget',
't_________wb', 'tcp target', 'Big Widget Target', 'None', 0, 28800, 1,
'p____bwidget', 'Big Widget Factory', 'None',
'o_____widget', 'Widget Inc', 'None',
'Current', current_timestamp, 'infinity'::timestamptz
);
select is(count(*), 2::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select is(count(*), 1::bigint) from wh_credential_dimension where organization_id = 'o_____widget' and current_row_indicator = 'Current';
select * from finish();
rollback;

@ -0,0 +1,108 @@
-- session_multiple_sessions tests the wh_credential_dimension when
-- multiple sessions are created using.
begin;
select plan(13);
select wtt_load('widgets', 'iam', 'kms', 'auth', 'hosts', 'targets', 'credentials');
-- ensure no existing dimensions
select is(count(*), 0::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
-- insert first session, should result in a new credentials dimension
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's1____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s1____walter', 'vl______wvl1', null, 'application');
select is(count(*), 1::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
-- another session with:
-- * same user
-- * same auth
-- * same host
-- should not result in a new credential dimension
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's2____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s2____walter', 'vl______wvl1', null, 'application');
select is(count(*), 1::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
-- change the crediential for the target
update credential_vault_library set vault_path = '/secrets/tcp/admin' where public_id = 'vl______wvl1';
-- start another session, should result in a new credential dimension
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's3____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s3____walter', 'vl______wvl1', null, 'application');
select is(count(*), 2::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
-- start another session, should result in a one new credential dimensions
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's4____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s4____walter', 'vl______wvl1', null, 'application'),
('s4____walter', 'vl______wvl2', null, 'application');
select is(count(*), 3::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
-- change the crediential again for the target
update credential_vault_library set vault_path = '/secrets/tcp/user' where vault_path = '/secrets/tcp/admin';
-- start another session, should result in a one new credential dimensions since one changed
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's5____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s5____walter', 'vl______wvl1', null, 'application'),
('s5____walter', 'vl______wvl2', null, 'application');
select is(count(*), 4::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select is(count(*), 2::bigint) from wh_credential_dimension where organization_id = 'o_____widget' and current_row_indicator = 'Current';
-- remove all credentials from the target
-- then test creating a session
delete from credential_vault_library;
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's6____walter');
select is(count(*), 4::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select is(credential_group_key, 'no credentials') from wh_session_accumulating_fact where session_id = 's6____walter';
insert into session_connection
(session_id, public_id)
values
('s6____walter', 'sc6____walter');
select is(credential_group_key, 'no credentials') from wh_session_connection_accumulating_fact where session_id = 's6____walter';
-- insert into a session for a target that never had any credentials associated with it.
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________ws', 's___1ws-sths', 'h_____ws__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's7____walter');
select is(count(*), 4::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select is(credential_group_key, 'no credentials') from wh_session_accumulating_fact where session_id = 's7____walter';
insert into session_connection
(session_id, public_id)
values
('s7____walter', 'sc7____walter');
select is(credential_group_key, 'no credentials') from wh_session_connection_accumulating_fact where session_id = 's7____walter';
select * from finish();
rollback;

@ -0,0 +1,33 @@
-- session_update tests the wh_credential_dimension when
-- a session is inserted and then updated.
begin;
select plan(3);
select wtt_load('widgets', 'iam', 'kms', 'auth', 'hosts', 'targets', 'credentials');
-- ensure no existing dimensions
select is(count(*), 0::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
-- insert first session, should result in a new user dimension
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's1____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s1____walter', 'vl______wvl1', null, 'application');
select is(count(*), 1::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
-- update session, should not impact wh_credential_dimension
update session set
version = 2
where
public_id = 's1____walter';
select is(count(*), 1::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select * from finish();
rollback;

@ -0,0 +1,47 @@
-- three_credentials_one_change tests that:
-- when a session with three credentials is created
-- three wh_credential_dimensions are created
-- then when one of the credential libraries is updated
-- and a new session is created
-- only one of the wh_credential_dimensions is updated
begin;
select plan(4);
select wtt_load('widgets', 'iam', 'kms', 'auth', 'hosts', 'targets', 'credentials');
-- ensure no existing dimensions
select is(count(*), 0::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
-- insert session and session_credential_dynamic, should result in a three new credential dimensions
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's1____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s1____walter', 'vl______wvl1', null, 'application'),
('s1____walter', 'vl______wvl2', null, 'application'),
('s1____walter', 'vl______wvl3', null, 'application');
select is(count(*), 3::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
update credential_vault_library set vault_path = '/secrets/tcp/user' where public_id = 'vl______wvl2';
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's2____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s2____walter', 'vl______wvl1', null, 'application'),
('s2____walter', 'vl______wvl2', null, 'application'),
('s2____walter', 'vl______wvl3', null, 'application');
select is(count(*), 4::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select is(count(*), 3::bigint) from wh_credential_dimension where organization_id = 'o_____widget' and current_row_indicator = 'Current';
select * from finish();
rollback;

@ -0,0 +1,51 @@
-- source tests the whx_credential_dimension_source view.
begin;
select plan(1);
select wtt_load('widgets', 'iam', 'kms', 'auth', 'hosts', 'targets', 'credentials');
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's1____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s1____walter', 'vl______wvl1', null, 'application');
select is(s.*, row(
's1____walter',
'application', -- credential_purpose,
'vl______wvl1', -- credential_library_id,
'vault credential library', -- credential_library_type,
'widget vault library', -- credential_library_name,
'None', -- credential_library_description,
'/secrets', -- credential_library_vault_path,
'GET', -- credential_library_vault_http_method,
'None', -- credential_library_vault_http_request_body,
'vs_______wvs', -- credential_store_id,
'vault credential store', -- credential_store_type,
'widget vault store', -- credential_store_name,
'None', -- credential_store_description,
'default', -- credential_store_vault_namespace,
'https://vault.widget', -- credential_store_vault_address,
't_________wb', -- target_id,
'tcp target', -- target_type,
'Big Widget Target', -- target_name,
'None', -- target_description,
0, -- target_default_port_number,
28800, -- target_session_max_seconds,
1, -- target_session_connection_limit,
'p____bwidget', -- project_id,
'Big Widget Factory', -- project_name,
'None', -- project_description,
'o_____widget', -- organization_id,
'Widget Inc', -- organization_name,
'None' -- organization_description
)::whx_credential_dimension_source)
from whx_credential_dimension_source as s
where s.target_id = 't_________wb';
rollback;

@ -0,0 +1,44 @@
-- target tests teh whx_credential_dimension_target view.
begin;
select plan(2);
select is_empty($$select * from whx_credential_dimension_target where target_id = 't_________wb'$$);
insert into wh_credential_dimension
(
key,
credential_purpose,
credential_library_id, credential_library_type, credential_library_name, credential_library_description, credential_library_vault_path, credential_library_vault_http_method, credential_library_vault_http_request_body,
credential_store_id, credential_store_type, credential_store_name, credential_store_description, credential_store_vault_namespace, credential_store_vault_address,
target_id, target_type, target_name, target_description, target_default_port_number, target_session_max_seconds, target_session_connection_limit,
project_id, project_name, project_description,
organization_id, organization_name, organization_description,
current_row_indicator, row_effective_time, row_expiration_time
)
values
(
'wcd________1',
'application',
'vl_______wvl', 'vault credential library', 'widget vault library', 'None', '/secrets', 'GET', 'None',
'vs_______wvs', 'vault credential store', 'widget vault store', 'None', 'blue', 'https://vault.widget',
't_________wb', 'tcp target', 'Big Widget Target', 'None', 0, 28800, 1,
'p____bwidget', 'Big Widget Factory', 'None',
'o_____widget', 'Widget Inc', 'None',
'Current', '2021-07-21T12:01'::timestamptz, 'infinity'::timestamptz
);
select is(t.*, row(
'wcd________1',
'application',
'vl_______wvl', 'vault credential library', 'widget vault library', 'None', '/secrets', 'GET', 'None',
'vs_______wvs', 'vault credential store', 'widget vault store', 'None', 'blue', 'https://vault.widget',
't_________wb', 'tcp target', 'Big Widget Target', 'None', 0, 28800, 1,
'p____bwidget', 'Big Widget Factory', 'None',
'o_____widget', 'Widget Inc', 'None'
)::whx_credential_dimension_target)
from whx_credential_dimension_target as t
where t.target_id = 't_________wb';
select * from finish();
rollback;

@ -0,0 +1,27 @@
--- insert tests a simple insert of session_credential_dynamic
begin;
select plan(6);
select wtt_load('widgets', 'iam', 'kms', 'auth', 'hosts', 'targets', 'credentials');
-- ensure no existing dimensions or bridge table rows
select is(count(*), 0::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select is(count(*), 0::bigint) from wh_credential_group_membership where credential_group_key != 'no credentials' and credential_group_key != 'Unknown';
select is(count(*), 0::bigint) from wh_credential_group where key != 'no credentials' and key != 'Unknown';
--- insert single credential
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's1____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s1____walter', 'vl______wvl1', null, 'application');
select is(count(*), 1::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select is(count(*), 1::bigint) from wh_credential_group_membership where credential_group_key != 'no credentials' and credential_group_key != 'Unknown';
select is(count(*), 1::bigint) from wh_credential_group where key != 'no credentials' and key != 'Unknown';
select * from finish();
rollback;

@ -0,0 +1,29 @@
--- insert_multiple tests inserting multiple session_credential_dynamic as a single statement
begin;
select plan(6);
select wtt_load('widgets', 'iam', 'kms', 'auth', 'hosts', 'targets', 'credentials');
-- ensure no existing dimensions
select is(count(*), 0::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select is(count(*), 0::bigint) from wh_credential_group_membership where credential_group_key != 'no credentials' and credential_group_key != 'Unknown';
select is(count(*), 0::bigint) from wh_credential_group where key != 'no credentials' and key != 'Unknown';
--- multiple single credentials
insert into session
( scope_id, target_id, host_set_id, host_id, user_id, auth_token_id, certificate, endpoint, public_id)
values
('p____bwidget', 't_________wb', 's___1wb-sths', 'h_____wb__01', 'u_____walter', 'tok___walter', 'abc'::bytea, 'ep1', 's1____walter');
insert into session_credential_dynamic
( session_id, library_id, credential_id, credential_purpose)
values
('s1____walter', 'vl______wvl1', null, 'application'),
('s1____walter', 'vl______wvl2', null, 'application'),
('s1____walter', 'vl______wvl3', null, 'application'),
('s1____walter', 'vl______wvl3', null, 'egress');
select is(count(*), 4::bigint) from wh_credential_dimension where organization_id = 'o_____widget';
select is(count(*), 4::bigint) from wh_credential_group_membership where credential_group_key != 'no credentials' and credential_group_key != 'Unknown';
select is(count(*), 1::bigint) from wh_credential_group where key != 'no credentials' and key != 'Unknown';
select * from finish();
rollback;

@ -38,7 +38,7 @@ func CalculateAuthorizedCollectionActions(ctx context.Context,
if err != nil {
return nil, err
}
ret[k.String()+"s"] = lv
ret[k.PluralString()] = lv
}
}
return ret, nil

@ -501,6 +501,9 @@ func toProto(in credential.Store, opt ...handlers.Option) (*pb.CredentialStore,
if outputFields.Has(globals.AuthorizedActionsField) {
out.AuthorizedActions = opts.WithAuthorizedActions
}
if outputFields.Has(globals.AuthorizedCollectionActionsField) {
out.AuthorizedCollectionActions = opts.WithAuthorizedCollectionActions
}
if outputFields.Has(globals.AttributesField) {
switch credential.SubtypeFromId(in.GetPublicId()) {
case vault.Subtype:

@ -34,7 +34,17 @@ import (
"google.golang.org/protobuf/types/known/wrapperspb"
)
var testAuthorizedActions = []string{"no-op", "read", "update", "delete"}
var (
testAuthorizedActions = []string{"no-op", "read", "update", "delete"}
testAuthorizedCollectionActions = map[string]*structpb.ListValue{
"credential-libraries": {
Values: []*structpb.Value{
structpb.NewStringValue("create"),
structpb.NewStringValue("list"),
},
},
}
)
func TestList(t *testing.T) {
conn, _ := db.TestSetup(t, "postgres")
@ -57,14 +67,15 @@ func TestList(t *testing.T) {
var wantStores []*pb.CredentialStore
for _, s := range vault.TestCredentialStores(t, conn, wrapper, prj.GetPublicId(), 10) {
wantStores = append(wantStores, &pb.CredentialStore{
Id: s.GetPublicId(),
ScopeId: prj.GetPublicId(),
Scope: &scopepb.ScopeInfo{Id: prj.GetPublicId(), Type: scope.Project.String(), ParentScopeId: prj.GetParentId()},
CreatedTime: s.GetCreateTime().GetTimestamp(),
UpdatedTime: s.GetUpdateTime().GetTimestamp(),
Version: s.GetVersion(),
Type: vault.Subtype.String(),
AuthorizedActions: testAuthorizedActions,
Id: s.GetPublicId(),
ScopeId: prj.GetPublicId(),
Scope: &scopepb.ScopeInfo{Id: prj.GetPublicId(), Type: scope.Project.String(), ParentScopeId: prj.GetParentId()},
CreatedTime: s.GetCreateTime().GetTimestamp(),
UpdatedTime: s.GetUpdateTime().GetTimestamp(),
Version: s.GetVersion(),
Type: vault.Subtype.String(),
AuthorizedActions: testAuthorizedActions,
AuthorizedCollectionActions: testAuthorizedCollectionActions,
Attributes: func() *structpb.Struct {
attrs, err := handlers.ProtoToStruct(&pb.VaultCredentialStoreAttributes{
Address: wrapperspb.String(s.GetVaultAddress()),
@ -451,7 +462,8 @@ func TestCreate(t *testing.T) {
require.NoError(t, err)
return attrs
}(),
AuthorizedActions: testAuthorizedActions,
AuthorizedActions: testAuthorizedActions,
AuthorizedCollectionActions: testAuthorizedCollectionActions,
},
},
},
@ -495,7 +507,8 @@ func TestCreate(t *testing.T) {
require.NoError(t, err)
return attrs
}(),
AuthorizedActions: testAuthorizedActions,
AuthorizedActions: testAuthorizedActions,
AuthorizedCollectionActions: testAuthorizedCollectionActions,
},
},
},
@ -580,14 +593,15 @@ func TestGet(t *testing.T) {
id: store.GetPublicId(),
res: &pbs.GetCredentialStoreResponse{
Item: &pb.CredentialStore{
Id: store.GetPublicId(),
ScopeId: store.GetScopeId(),
Scope: &scopepb.ScopeInfo{Id: store.GetScopeId(), Type: scope.Project.String(), ParentScopeId: prj.GetParentId()},
Type: vault.Subtype.String(),
AuthorizedActions: testAuthorizedActions,
CreatedTime: store.CreateTime.GetTimestamp(),
UpdatedTime: store.UpdateTime.GetTimestamp(),
Version: 1,
Id: store.GetPublicId(),
ScopeId: store.GetScopeId(),
Scope: &scopepb.ScopeInfo{Id: store.GetScopeId(), Type: scope.Project.String(), ParentScopeId: prj.GetParentId()},
Type: vault.Subtype.String(),
AuthorizedActions: testAuthorizedActions,
AuthorizedCollectionActions: testAuthorizedCollectionActions,
CreatedTime: store.CreateTime.GetTimestamp(),
UpdatedTime: store.UpdateTime.GetTimestamp(),
Version: 1,
Attributes: func() *structpb.Struct {
attrs, err := handlers.ProtoToStruct(&pb.VaultCredentialStoreAttributes{
Address: wrapperspb.String(store.GetVaultAddress()),

@ -62,7 +62,6 @@ var (
resource.Group: groups.CollectionActions,
resource.Role: roles.CollectionActions,
resource.Scope: CollectionActions,
resource.Session: sessions.CollectionActions,
resource.User: users.CollectionActions,
},
@ -72,7 +71,6 @@ var (
resource.Group: groups.CollectionActions,
resource.Role: roles.CollectionActions,
resource.Scope: CollectionActions,
resource.Session: sessions.CollectionActions,
resource.User: users.CollectionActions,
},
@ -81,6 +79,7 @@ var (
resource.Group: groups.CollectionActions,
resource.HostCatalog: host_catalogs.CollectionActions,
resource.Role: roles.CollectionActions,
resource.Session: sessions.CollectionActions,
resource.Target: targets.CollectionActions,
},
}

@ -89,11 +89,6 @@ var globalAuthorizedCollectionActions = map[string]*structpb.ListValue{
structpb.NewStringValue("list"),
},
},
"sessions": {
Values: []*structpb.Value{
structpb.NewStringValue("list"),
},
},
"users": {
Values: []*structpb.Value{
structpb.NewStringValue("create"),
@ -132,11 +127,6 @@ var orgAuthorizedCollectionActions = map[string]*structpb.ListValue{
structpb.NewStringValue("list"),
},
},
"sessions": {
Values: []*structpb.Value{
structpb.NewStringValue("list"),
},
},
"users": {
Values: []*structpb.Value{
structpb.NewStringValue("create"),
@ -170,6 +160,11 @@ var projectAuthorizedCollectionActions = map[string]*structpb.ListValue{
structpb.NewStringValue("list"),
},
},
"sessions": {
Values: []*structpb.Value{
structpb.NewStringValue("list"),
},
},
"targets": {
Values: []*structpb.Value{
structpb.NewStringValue("create"),

@ -220,7 +220,7 @@ func (s Service) CancelSession(ctx context.Context, req *pbs.CancelSessionReques
var outputFields perms.OutputFieldsMap
authorizedActions := authResults.FetchActionSetForId(ctx, ses.GetPublicId(), IdActions)
// Check to see if we need to verify Read vs. just ReadSelf
// Check to see if we need to verify Cancel vs. just CancelSelf
if ses.UserId != authResults.UserId {
if !authorizedActions.HasAction(action.Cancel) {
return nil, handlers.ForbiddenError()

@ -26,6 +26,7 @@ func (c *Controller) startListeners() error {
configureForAPI := func(ln *base.ServerListener) error {
handler, err := c.handler(HandlerProperties{
ListenerConfig: ln.Config,
CancelCtx: c.baseContext,
})
if err != nil {
return err

@ -1,5 +1,10 @@
package session
import (
"fmt"
"strings"
)
const (
activateStateCte = `
insert into session_state
@ -346,3 +351,34 @@ with
%s
`
)
const (
sessionCredentialDynamicBatchInsertBase = `
insert into session_credential_dynamic
( session_id, library_id, credential_purpose )
values
`
sessionCredentialDynamicBatchInsertValue = `
(?, ?, ?)`
sessionCredentialDynamicBatchInsertReturning = `
returning session_id, library_id, credential_id, credential_purpose
`
)
func batchInsertsessionCredentialDynamic(creds []*DynamicCredential) (string, []interface{}, error) {
if len(creds) <= 0 {
return "", nil, fmt.Errorf("empty slice of DynamicCredential, cannot build query")
}
batchInsertParams := make([]string, 0, len(creds))
batchInsertArgs := make([]interface{}, 0, len(creds)*3)
for _, cred := range creds {
batchInsertParams = append(batchInsertParams, sessionCredentialDynamicBatchInsertValue)
batchInsertArgs = append(batchInsertArgs, []interface{}{cred.SessionId, cred.LibraryId, cred.CredentialPurpose}...)
}
q := sessionCredentialDynamicBatchInsertBase + strings.Join(batchInsertParams, ",") + sessionCredentialDynamicBatchInsertReturning
return q, batchInsertArgs, nil
}

@ -89,14 +89,26 @@ func (r *Repository) CreateSession(ctx context.Context, sessionWrapper wrapping.
if err = w.Create(ctx, returnedSession); err != nil {
return errors.Wrap(ctx, err, op)
}
for _, cred := range newSession.DynamicCredentials {
cred.SessionId = newSession.PublicId
returnedCred := cred.clone()
if err = w.Create(ctx, returnedCred); err != nil {
}
// TODO: after upgrading to gorm v2 this batch insert can be replaced, since gorm v2 supports batch inserts
q, batchInsertArgs, err := batchInsertsessionCredentialDynamic(newSession.DynamicCredentials)
if err == nil {
rows, err := w.Query(ctx, q, batchInsertArgs)
if err != nil {
return errors.Wrap(ctx, err, op)
}
returnedSession.DynamicCredentials = append(returnedSession.DynamicCredentials, returnedCred)
defer rows.Close()
for rows.Next() {
var returnedCred DynamicCredential
w.ScanRows(rows, &returnedCred)
returnedSession.DynamicCredentials = append(returnedSession.DynamicCredentials, &returnedCred)
}
}
var foundStates []*State
// trigger will create new "Pending" state
if foundStates, err = fetchStates(ctx, read, returnedSession.PublicId); err != nil {

@ -60,6 +60,15 @@ func (r Type) String() string {
}[r]
}
func (r Type) PluralString() string {
switch r {
case CredentialLibrary:
return "credential-libraries"
default:
return r.String() + "s"
}
}
var Map = map[string]Type{
Unknown.String(): Unknown,
All.String(): All,

@ -1,11 +1,11 @@
# ***
# WARNING: Do not EDIT or MERGE this file, it is generated by packagespec.
# ***
lockid: 591648cfcfc4fec2
lockid: fd66eb9f99de501d
packagespec-version: 0.2.6
cache-version: 10
packages:
- packagespecid: 71fa601616a225899af09c8369ed9fc2d6d998e2
- packagespecid: ddd70200153c0e65302afe88a629b4440e8eebfa
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -13,12 +13,12 @@ packages:
GOARCH: amd64
GOOS: darwin
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: darwin_amd64_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -33,7 +33,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-71fa601616a225899af09c8369ed9fc2d6d998e2
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-ddd70200153c0e65302afe88a629b4440e8eebfa
POST_PROCESSORS: apple-notarize av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -41,14 +41,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-71fa601616a225899af09c8369ed9fc2d6d998e2-{{checksum ".buildcache/cache-keys/package-71fa601616a225899af09c8369ed9fc2d6d998e2"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-ddd70200153c0e65302afe88a629b4440e8eebfa-{{checksum ".buildcache/cache-keys/package-ddd70200153c0e65302afe88a629b4440e8eebfa"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_darwin_amd64.zip
path: boundary_0.6.1_darwin_amd64.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_darwin_amd64.zip
- packagespecid: 97f48b65a2b1d85a686a4799e8aff6926cfbe04f
path: boundary/boundary_0.6.1/boundary_0.6.1_darwin_amd64.zip
- packagespecid: dca0ba02c8e7351c77442c86523f121a8c83039e
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -56,12 +56,12 @@ packages:
GOARCH: "386"
GOOS: freebsd
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: freebsd_386_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -76,7 +76,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-dca0ba02c8e7351c77442c86523f121a8c83039e
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -84,14 +84,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f-{{checksum ".buildcache/cache-keys/package-97f48b65a2b1d85a686a4799e8aff6926cfbe04f"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-dca0ba02c8e7351c77442c86523f121a8c83039e-{{checksum ".buildcache/cache-keys/package-dca0ba02c8e7351c77442c86523f121a8c83039e"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_freebsd_386.zip
path: boundary_0.6.1_freebsd_386.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_freebsd_386.zip
- packagespecid: b6d16e35ea34fa5749ed92c8062e1cea96fd24f4
path: boundary/boundary_0.6.1/boundary_0.6.1_freebsd_386.zip
- packagespecid: 72b242aa7004a2a706bd894df71c002671e8e425
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -99,12 +99,12 @@ packages:
GOARCH: amd64
GOOS: freebsd
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: freebsd_amd64_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -119,7 +119,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-72b242aa7004a2a706bd894df71c002671e8e425
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -127,14 +127,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4-{{checksum ".buildcache/cache-keys/package-b6d16e35ea34fa5749ed92c8062e1cea96fd24f4"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-72b242aa7004a2a706bd894df71c002671e8e425-{{checksum ".buildcache/cache-keys/package-72b242aa7004a2a706bd894df71c002671e8e425"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_freebsd_amd64.zip
path: boundary_0.6.1_freebsd_amd64.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_freebsd_amd64.zip
- packagespecid: c8401fcf82b86f62e39516bc847423296f351dcf
path: boundary/boundary_0.6.1/boundary_0.6.1_freebsd_amd64.zip
- packagespecid: 2409a41c5c5a434b5dbf77d29deb31a24c825dea
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -142,12 +142,12 @@ packages:
GOARCH: arm
GOOS: freebsd
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: freebsd_arm_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -162,7 +162,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-c8401fcf82b86f62e39516bc847423296f351dcf
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-2409a41c5c5a434b5dbf77d29deb31a24c825dea
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -170,14 +170,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-c8401fcf82b86f62e39516bc847423296f351dcf-{{checksum ".buildcache/cache-keys/package-c8401fcf82b86f62e39516bc847423296f351dcf"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-2409a41c5c5a434b5dbf77d29deb31a24c825dea-{{checksum ".buildcache/cache-keys/package-2409a41c5c5a434b5dbf77d29deb31a24c825dea"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_freebsd_arm.zip
path: boundary_0.6.1_freebsd_arm.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_freebsd_arm.zip
- packagespecid: 30ba4053218e6b6a3f39b9d67cc430f9385e70cb
path: boundary/boundary_0.6.1/boundary_0.6.1_freebsd_arm.zip
- packagespecid: 1439f62f659d115a6675bef8de63d7a576370da6
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -185,12 +185,12 @@ packages:
GOARCH: "386"
GOOS: linux
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: linux_386_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -205,7 +205,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-1439f62f659d115a6675bef8de63d7a576370da6
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -213,14 +213,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb-{{checksum ".buildcache/cache-keys/package-30ba4053218e6b6a3f39b9d67cc430f9385e70cb"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-1439f62f659d115a6675bef8de63d7a576370da6-{{checksum ".buildcache/cache-keys/package-1439f62f659d115a6675bef8de63d7a576370da6"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_linux_386.zip
path: boundary_0.6.1_linux_386.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_linux_386.zip
- packagespecid: 809c3d22aa07bb5c66dca99d44efdd2bb37f2525
path: boundary/boundary_0.6.1/boundary_0.6.1_linux_386.zip
- packagespecid: 192814968c68e1c819d216b95925afb4099aaa07
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -228,12 +228,12 @@ packages:
GOARCH: amd64
GOOS: linux
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: linux_amd64_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -248,7 +248,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-192814968c68e1c819d216b95925afb4099aaa07
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -256,14 +256,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525-{{checksum ".buildcache/cache-keys/package-809c3d22aa07bb5c66dca99d44efdd2bb37f2525"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-192814968c68e1c819d216b95925afb4099aaa07-{{checksum ".buildcache/cache-keys/package-192814968c68e1c819d216b95925afb4099aaa07"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_linux_amd64.zip
path: boundary_0.6.1_linux_amd64.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_linux_amd64.zip
- packagespecid: 1efe1513c2b57d32392ed4a4601de1699c813457
path: boundary/boundary_0.6.1/boundary_0.6.1_linux_amd64.zip
- packagespecid: b734ede0c13f22ffc1835626300d3b3226acaf53
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -271,12 +271,12 @@ packages:
GOARCH: arm
GOOS: linux
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: linux_arm_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -291,7 +291,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-1efe1513c2b57d32392ed4a4601de1699c813457
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-b734ede0c13f22ffc1835626300d3b3226acaf53
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -299,14 +299,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-1efe1513c2b57d32392ed4a4601de1699c813457-{{checksum ".buildcache/cache-keys/package-1efe1513c2b57d32392ed4a4601de1699c813457"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-b734ede0c13f22ffc1835626300d3b3226acaf53-{{checksum ".buildcache/cache-keys/package-b734ede0c13f22ffc1835626300d3b3226acaf53"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_linux_arm.zip
path: boundary_0.6.1_linux_arm.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_linux_arm.zip
- packagespecid: 384724ac89d695a73abb78aad60acd3c7c2c4270
path: boundary/boundary_0.6.1/boundary_0.6.1_linux_arm.zip
- packagespecid: a1ae7292c28d5e629eea144b2fafb6f234ca4ffa
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -314,12 +314,12 @@ packages:
GOARCH: arm64
GOOS: linux
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: linux_arm64_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -334,7 +334,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-384724ac89d695a73abb78aad60acd3c7c2c4270
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -342,14 +342,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-384724ac89d695a73abb78aad60acd3c7c2c4270-{{checksum ".buildcache/cache-keys/package-384724ac89d695a73abb78aad60acd3c7c2c4270"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa-{{checksum ".buildcache/cache-keys/package-a1ae7292c28d5e629eea144b2fafb6f234ca4ffa"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_linux_arm64.zip
path: boundary_0.6.1_linux_arm64.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_linux_arm64.zip
- packagespecid: 45fe69b6571284299223e8245f29c40ffd9c43dd
path: boundary/boundary_0.6.1/boundary_0.6.1_linux_arm64.zip
- packagespecid: b9d9cc299dc96f89f71285c520b4cd244879207a
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -357,12 +357,12 @@ packages:
GOARCH: "386"
GOOS: netbsd
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: netbsd_386_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -377,7 +377,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-45fe69b6571284299223e8245f29c40ffd9c43dd
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-b9d9cc299dc96f89f71285c520b4cd244879207a
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -385,14 +385,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-45fe69b6571284299223e8245f29c40ffd9c43dd-{{checksum ".buildcache/cache-keys/package-45fe69b6571284299223e8245f29c40ffd9c43dd"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-b9d9cc299dc96f89f71285c520b4cd244879207a-{{checksum ".buildcache/cache-keys/package-b9d9cc299dc96f89f71285c520b4cd244879207a"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_netbsd_386.zip
path: boundary_0.6.1_netbsd_386.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_netbsd_386.zip
- packagespecid: e5c674f011608ff0ec51285ea342fe7a95f803e5
path: boundary/boundary_0.6.1/boundary_0.6.1_netbsd_386.zip
- packagespecid: 744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -400,12 +400,12 @@ packages:
GOARCH: amd64
GOOS: netbsd
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: netbsd_amd64_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -420,7 +420,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-e5c674f011608ff0ec51285ea342fe7a95f803e5
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -428,14 +428,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-e5c674f011608ff0ec51285ea342fe7a95f803e5-{{checksum ".buildcache/cache-keys/package-e5c674f011608ff0ec51285ea342fe7a95f803e5"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d-{{checksum ".buildcache/cache-keys/package-744fe90a3f2bd01206fbf95ad9d8a8db6b66d84d"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_netbsd_amd64.zip
path: boundary_0.6.1_netbsd_amd64.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_netbsd_amd64.zip
- packagespecid: 291ec533f804015a31ec89443c1100a2ae526856
path: boundary/boundary_0.6.1/boundary_0.6.1_netbsd_amd64.zip
- packagespecid: 489303d43b63583b456fef92eeef9806a25ef98d
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -443,12 +443,12 @@ packages:
GOARCH: "386"
GOOS: openbsd
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: openbsd_386_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -463,7 +463,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-291ec533f804015a31ec89443c1100a2ae526856
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-489303d43b63583b456fef92eeef9806a25ef98d
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -471,14 +471,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-291ec533f804015a31ec89443c1100a2ae526856-{{checksum ".buildcache/cache-keys/package-291ec533f804015a31ec89443c1100a2ae526856"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-489303d43b63583b456fef92eeef9806a25ef98d-{{checksum ".buildcache/cache-keys/package-489303d43b63583b456fef92eeef9806a25ef98d"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_openbsd_386.zip
path: boundary_0.6.1_openbsd_386.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_openbsd_386.zip
- packagespecid: d2dbe866712854d51e105c9b8b33ff5e22d1287c
path: boundary/boundary_0.6.1/boundary_0.6.1_openbsd_386.zip
- packagespecid: 394f426443d3ce6446a86a2982253a2637c061e0
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -486,12 +486,12 @@ packages:
GOARCH: amd64
GOOS: openbsd
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: openbsd_amd64_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -506,7 +506,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-d2dbe866712854d51e105c9b8b33ff5e22d1287c
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-394f426443d3ce6446a86a2982253a2637c061e0
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -514,14 +514,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-d2dbe866712854d51e105c9b8b33ff5e22d1287c-{{checksum ".buildcache/cache-keys/package-d2dbe866712854d51e105c9b8b33ff5e22d1287c"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-394f426443d3ce6446a86a2982253a2637c061e0-{{checksum ".buildcache/cache-keys/package-394f426443d3ce6446a86a2982253a2637c061e0"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_openbsd_amd64.zip
path: boundary_0.6.1_openbsd_amd64.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_openbsd_amd64.zip
- packagespecid: 4571f72423cae365aaad07ec1a16fffe7aaa468e
path: boundary/boundary_0.6.1/boundary_0.6.1_openbsd_amd64.zip
- packagespecid: e13e5614c5583baf504ebe29fbf99c21f9447926
inputs:
BINARY_NAME: boundary
BUILD_TAGS: ui
@ -529,12 +529,12 @@ packages:
GOARCH: amd64
GOOS: solaris
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: solaris_amd64_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -549,7 +549,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-4571f72423cae365aaad07ec1a16fffe7aaa468e
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-e13e5614c5583baf504ebe29fbf99c21f9447926
POST_PROCESSORS: av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -557,14 +557,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-4571f72423cae365aaad07ec1a16fffe7aaa468e-{{checksum ".buildcache/cache-keys/package-4571f72423cae365aaad07ec1a16fffe7aaa468e"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
PACKAGE_CACHE_KEY: package-e13e5614c5583baf504ebe29fbf99c21f9447926-{{checksum ".buildcache/cache-keys/package-e13e5614c5583baf504ebe29fbf99c21f9447926"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary
aliases:
- type: local
path: boundary_0.6.0_solaris_amd64.zip
path: boundary_0.6.1_solaris_amd64.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_solaris_amd64.zip
- packagespecid: 93124fe6bd57406690ddeef8d33ee90d67f3eb04
path: boundary/boundary_0.6.1/boundary_0.6.1_solaris_amd64.zip
- packagespecid: 3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9
inputs:
BINARY_NAME: boundary.exe
BUILD_TAGS: ui
@ -572,12 +572,12 @@ packages:
GOARCH: "386"
GOOS: windows
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: windows_386_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -592,7 +592,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-93124fe6bd57406690ddeef8d33ee90d67f3eb04
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9
POST_PROCESSORS: microsoft-notarize av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -600,14 +600,14 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-93124fe6bd57406690ddeef8d33ee90d67f3eb04-{{checksum ".buildcache/cache-keys/package-93124fe6bd57406690ddeef8d33ee90d67f3eb04"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary.exe ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary.exe
PACKAGE_CACHE_KEY: package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9-{{checksum ".buildcache/cache-keys/package-3fc99d62bf311ae70f4e4d9fd3f2a063691f00b9"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary.exe ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary.exe
aliases:
- type: local
path: boundary_0.6.0_windows_386.zip
path: boundary_0.6.1_windows_386.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_windows_386.zip
- packagespecid: a60bb9fef193b738fd139723936b23ea38b18d25
path: boundary/boundary_0.6.1/boundary_0.6.1_windows_386.zip
- packagespecid: 7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8
inputs:
BINARY_NAME: boundary.exe
BUILD_TAGS: ui
@ -615,12 +615,12 @@ packages:
GOARCH: amd64
GOOS: windows
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION_MMP: 0.6.0
PRODUCT_VERSION: 0.6.1
PRODUCT_VERSION_MMP: 0.6.1
PRODUCT_VERSION_PRE: ""
meta:
BUILD_JOB_NAME: windows_amd64_package
BUNDLE_NAME: boundary_0.6.0
BUNDLE_NAME: boundary_0.6.1
builtin:
BUILD_LAYERS:
- type: copy-source
@ -635,7 +635,7 @@ packages:
- type: base
name: 00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16
archive: .buildcache/archives/00-base-ec5342e30fcbfeb5947252a8ea679fb281052d16.tar.gz
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-a60bb9fef193b738fd139723936b23ea38b18d25
PACKAGE_CACHE_KEY_FILE: .buildcache/cache-keys/package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8
POST_PROCESSORS: microsoft-notarize av-scan
circleci:
BUILDER_CACHE_KEY_PREFIX_LIST:
@ -643,13 +643,13 @@ packages:
- go-modules_daae_{{checksum ".buildcache/cache-keys/go-modules-daaedd9766787e3ce8f199fada1cb849024e6ca3"}}
- ui_8f7b_{{checksum ".buildcache/cache-keys/ui-8f7be629bbc1c7ec7af40fb527b15c30d590b13a"}}
- base_ec53_{{checksum ".buildcache/cache-keys/base-ec5342e30fcbfeb5947252a8ea679fb281052d16"}}
PACKAGE_CACHE_KEY: package-a60bb9fef193b738fd139723936b23ea38b18d25-{{checksum ".buildcache/cache-keys/package-a60bb9fef193b738fd139723936b23ea38b18d25"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.0 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary.exe ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary.exe
PACKAGE_CACHE_KEY: package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8-{{checksum ".buildcache/cache-keys/package-7f7bcfc7f39c7e62aeba7d94b345a3ebdf2ab7f8"}}
build-command: VERSION_PKG_PATH=github.com/hashicorp/boundary/version; unset GOPATH; go build -v -tags 'ui' -ldflags "-X $VERSION_PKG_PATH.GitCommit=$PACKAGE_SOURCE_ID -X $VERSION_PKG_PATH.Version=0.6.1 -X $VERSION_PKG_PATH.VersionPrerelease=" -o $OUTPUT_DIR/boundary.exe ./cmd/boundary && cd $OUTPUT_DIR && zip $PACKAGE_ZIP_NAME boundary.exe
aliases:
- type: local
path: boundary_0.6.0_windows_amd64.zip
path: boundary_0.6.1_windows_amd64.zip
- type: public-hc-releases
path: boundary/boundary_0.6.0/boundary_0.6.0_windows_amd64.zip
path: boundary/boundary_0.6.1/boundary_0.6.1_windows_amd64.zip
base-image: docker.mirror.hashicorp.services/golang@sha256:448a13037d13401ad9b31fabf91d6b8a3c5c35d336cc8af760b2ab4ed85d4155
layers:
- depth: 0

@ -10,7 +10,7 @@ config:
inputs:
defaults:
PRODUCT_NAME: boundary
PRODUCT_VERSION: 0.6.0
PRODUCT_VERSION: 0.6.1
BUILD_TAGS: ui
CGO_ENABLED: 0

@ -13,10 +13,8 @@ import (
var testDBPort = "5432"
var (
// StartUsingTemplate creates a new test database from a postgres template database.
StartUsingTemplate func(dialect string, opt ...Option) (func() error, string, string, error) = startUsingTemplate
)
// StartUsingTemplate creates a new test database from a postgres template database.
var StartUsingTemplate func(dialect string, opt ...Option) (func() error, string, string, error) = startUsingTemplate
func init() {
rand.Seed(time.Now().UnixNano())
@ -39,12 +37,12 @@ const (
)
var supportedDialects = map[string]struct{}{
Postgres: struct{}{},
Postgres: {},
}
var supportedTemplates = map[string]struct{}{
BoundaryTemplate: struct{}{},
Template1: struct{}{},
BoundaryTemplate: {},
Template1: {},
}
const letterBytes = "abcdefghijklmnopqrstuvwxyz"

@ -0,0 +1,29 @@
---
layout: docs
page_title: v0.5.0
description: |-
Boundary release notes for v0.5.0
---
# [Boundary v0.5.0](https://www.boundaryproject.io/downloads)
The release notes below contain information about new functionality available in Boundary v0.5.0.
To see a granular record of when each item was merged into the Boundary project, please refer to the [Changelog](https://github.com/hashicorp/boundary/blob/main/CHANGELOG.md).
To learn about what Boundary consists of, we highly recommend you start at the [Getting Started Page](/docs/getting-started).
Lastly, for instructions on how to upgrade an existing Boundary deployment to v0.5.0, please review Boundary's [general upgrade guide](https://learn.hashicorp.com/tutorials/boundary/upgrade-version).
## Boundary v0.5.0 Highlights
**Event Logging:** 0.5.0 includes significant improvements to Boundary's event logging system with the goal of giving operators greater visibility into system information in a well-defined, structured format.
When enabled, event logs are the only type of logging Boundary performs, meaning standard system information and debug logs will no longer appear in stdout.
The new structured event logs are filterable by event type and other expressions that users define. Legacy HCLog output is still supported through a user-configurable setting.
For more about event logging usage, see our [documentation](https://www.boundaryproject.io/docs/configuration/events/overview).
**Credential CRUD operations for Administrative Console:** The 0.5.0 release enables administrators to configure Vault credential libraries and Vault credential set resources through the administrative console.
Vault credential resources allow users to create Boundary sessions with credentials brokered by a Vault deployment.
## What's Changed
For more detailed information of all changes since 0.4.0, please refer to the [Changelog](https://github.com/hashicorp/boundary/blob/main/CHANGELOG.md)

@ -0,0 +1,26 @@
---
layout: docs
page_title: v0.6.0
description: |-
Boundary release notes for v0.6.0
---
# [Boundary v0.6.0](https://www.boundaryproject.io/downloads)
The release notes below contain information about new functionality available in Boundary v0.6.0 and the corresponding Boundary Desktop v1.3.0 and Boundary Terraform Provider v1.0.5 releases.
To see a granular record of when each item was merged into the Boundary project, please refer to the [Changelog](https://github.com/hashicorp/boundary/blob/main/CHANGELOG.md).
To learn about what Boundary consists of, we highly recommend you start at the [Getting Started Page](/docs/getting-started).
Lastly, for instructions on how to upgrade an existing Boundary deployment to v0.6.0, please review Boundary's [general upgrade guide](https://learn.hashicorp.com/tutorials/boundary/upgrade-version).
## Boundary v0.6.0 Highlights
**Permissions-based UI:** Boundary 0.6.0 dynamically tailors the administrator console to individual users' permissions, ensuring users are presented only with actions and workflows that can succeed.
**Linux Support for Boundary Desktop:** Boundary Desktop 1.3 adds support for Debian-based Linux distributions in addition to existing macOS and Windows support.
**Managed group configurations via Terraform:** Managed groups can now be configured via Terraform using v1.0.5 of [Boundary's Terraform Provider](https://registry.terraform.io/providers/hashicorp/boundary/latest).
## What's Changed
For more detailed information of all changes since 0.5.0, please refer to the [Changelog](https://github.com/hashicorp/boundary/blob/main/CHANGELOG.md)

@ -390,9 +390,17 @@
{
"title": "v0.4.0",
"path": "releases/release-notes/v0_4_0"
},
{
"title": "v0.5.0",
"path": "releases/release-notes/v0_5_0"
},
{
"title": "v0.6.0",
"path": "releases/release-notes/v0_6_0"
}
]
}
]
}
]
]

@ -1,2 +1,2 @@
export const VERSION = '0.6.0'
export const VERSION = '0.6.1'
export const DESKTOP_VERSION = '1.3.0'

Loading…
Cancel
Save