k
parent cb8885a21b
commit 8b54db427a
14 changed files with 537 additions and 7 deletions

README.md (27 changes)

@@ -1,6 +1,6 @@
# TALA

-TALA is Terrastruct's proprietary diagram layout engine, made for
+TALA is a general-purpose diagram layout engine, made for
[D2](https://github.com/terrastruct/d2). TALA is closed-source (for now). This repository
is primarily for installation instructions. You can also use this repository to report
issues, ask questions, and request features.
@@ -11,6 +11,15 @@ To learn more about TALA, please visit
To compare TALA with other layout engines, please visit
[https://text-to-diagram.com](https://text-to-diagram.com).

+## Rendering samples
+
+You can see the `.d2` text for these in [./docs/d2](./docs/d2). Samples are generated
+through the CLI ([./ci/generate_samples.sh](./ci/generate_samples.sh)).
+
+|<img src="./docs/assets/sample_1.svg" />|<img src="./docs/assets/sample_2.svg" />|<img src="./docs/assets/sample_3.svg" />|
+|:-------------------------:|:-------------------------:|:-------------------------:|
+|<img src="./docs/assets/sample_4.svg" />|<img src="./docs/assets/sample_5.svg" />|<img src="./docs/assets/sample_6.svg" />|
+
## Installation

### Prerequisites
@@ -108,9 +117,13 @@ like to remain anonymous/private, feel free to email us at info@terrastruct.com

### Does TALA use the internet?

-We do not collect telemetry or use the internet in any way except to ping to check the
-status of a license. This is only done when necessary, e.g. if you purchased a month
-subscription, TALA will ping at the end of that month and renew automatically if the
-subscription is ongoing. If you purchased a year, it won't ping for a year. The only data
-that's sent in these pings is the API token itself. No diagrams or anything else leaves
-your computer.
+The TALA plugin does not collect telemetry or use the internet in any way except to ping to
+check the status of a license. This is only done when necessary, e.g. if you purchased a
+month subscription, TALA will ping at the start of the next month and renew automatically
+if the subscription is ongoing. If you purchased a year, it won't ping for a year. The
+only data that's sent in these pings is the API token itself. No diagrams or anything
+else leaves your computer.
+
+### Others
+
+Full FAQ at [https://terrastruct.com/tala](https://terrastruct.com/tala).

ci/generate_samples.sh (18 lines, new executable file)

@@ -0,0 +1,18 @@
#!/bin/bash
set -eu
cd -- "$(dirname "$0")/.."

THEME_INDEX=0

# alternate between cool and warm
themes=(0 100 1 102 4 104 5 105 6)

for f in ./docs/d2/*.d2
do
  echo "Processing $f"
  filename=$(basename -- "$f")
  filename="${filename%.*}"
  # wrap the index so the script keeps working if samples outnumber themes
  D2_LAYOUT=tala d2 --theme="${themes[THEME_INDEX % ${#themes[@]}]}" --debug "$f" "./docs/assets/${filename}.svg"

  THEME_INDEX=$((THEME_INDEX + 1))
done
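
For reference, the loop body above reduces to a single `d2` invocation per sample. A minimal sketch of regenerating one sample by hand, assuming the `d2` CLI and the TALA plugin are installed (the theme ID here is simply the first entry of the `themes` array above):

```sh
# Render one sample with TALA selected as the layout engine via D2_LAYOUT,
# mirroring the loop body of ci/generate_samples.sh.
D2_LAYOUT=tala d2 --theme=0 --debug ./docs/d2/sample_1.d2 ./docs/assets/sample_1.svg
```

Any ID from the `themes` array can be substituted for `--theme=0` to get the alternating cool/warm palettes the script cycles through.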

docs/assets/sample_1.svg (66 lines, new file, 799 KiB): diff suppressed because one or more lines are too long
docs/assets/sample_2.svg (31 lines, new file, 476 KiB): diff suppressed because one or more lines are too long
docs/assets/sample_3.svg (31 lines, new file, 472 KiB): diff suppressed because one or more lines are too long
docs/assets/sample_4.svg (24 lines, new file, 331 KiB): diff suppressed because one or more lines are too long
docs/assets/sample_5.svg (39 lines, new file, 477 KiB): diff suppressed because one or more lines are too long
docs/assets/sample_6.svg (24 lines, new file, 331 KiB): diff suppressed because one or more lines are too long

docs/d2/sample_1.d2 (39 lines, new file)

@@ -0,0 +1,39 @@
runner: JobRunner {
  shape: class

  -start: datetime
  -end: datetime
  -running_server: string
  -threads: int
  -manager: JobsManager
  +pipeline: Pipeline

  +setPipeline(Pipeline p): void
  +kickoff(threads int): bool
}

jobsUI: JobRunner UI {
  kickoff
  halt
}

batch: Batch {
  manager: BatchManager {
    shape: class
    -num: int
    -timeout: int
    -pid

    +getStatus(): Enum
    +getJobs(): "Job[]"
    +setTimeout(seconds int)
  }
  systemd: Systemd
  selenium: Selenium

  systemd -> manager: Ensure alive
  manager -> selenium: Run job
}

jobsUI -> runner: Kick off
runner -> batch.manager: Queue jobs

docs/d2/sample_2.d2 (44 lines, new file)

@@ -0,0 +1,44 @@
logs: {
  shape: page
  style.multiple: true
}
user: AT&T Customer {shape: person}
network: Network {
  tower: Cell Tower {
    satellites: {
      shape: stored_data
      style.multiple: true
    }
    transmitter

    satellites -> transmitter
    satellites -> transmitter
    satellites -> transmitter
  }
  processor: Data Processor {
    storage: Storage {
      shape: cylinder
      style.multiple: true
    }
  }
  portal: Online Portal {
    UI
  }

  tower.transmitter -> processor: phone logs
}
server: API Server

user -> network.tower: Make call
network.processor -> server
network.processor -> server
network.processor -> server

server -> logs
server -> logs
server -> logs: persist

server -> network.portal.UI: display
user -> network.portal.UI: access {
  style.stroke-dash: 3
}

docs/d2/sample_3.d2 (31 lines, new file)

@@ -0,0 +1,31 @@
SSR: Server side render {
  templates: User defined templates

  tests: Validation tests

  engine: Rendering Engine\n API {
    ingestion: Ingestion module {
      shape: hexagon
    }
    fetch: Data fetching module {
      shape: hexagon
    }
    schema: Schema version module {
      shape: hexagon
    }
  }
  next: NextJS
  db: Data

  templates -> engine.ingestion
  engine.fetch <-> db: Integrate user data
  engine.schema <-> db: Get version

  engine <-> tests
}

build: Final build {
  html: Rendered HTML
}

SSR.engine -> SSR.next -> build.html

docs/d2/sample_4.d2 (48 lines, new file)

@@ -0,0 +1,48 @@
payment

AWS {
  orchestrator: Orchestrator
  airflow: Apache Airflow {
    queue1: Queue 1 {
      shape: queue
    }
    queue2: Queue 2 {
      shape: queue
    }
    queue3: Queue 3 {
      shape: queue
    }
    queue4: Queue 4 {
      shape: queue
    }
  }

  orchestrator -> airflow.queue1
  orchestrator -> airflow.queue2
  orchestrator -> airflow.queue3
  orchestrator -> airflow.queue4
}

payment -> AWS.orchestrator

backup: Data backup

AWS.airflow.queue3 -> backup
AWS.airflow.queue4 -> backup

data: Data warehouse

AWS.airflow.queue1 -> data
AWS.airflow.queue2 -> data

local: On-prem backups {
  queue1: Queue 1 {
    shape: queue
  }
  queue2: Queue 2 {
    shape: queue
  }
}

backup -> local.queue1
backup -> local.queue2

docs/d2/sample_5.d2 (73 lines, new file)

@@ -0,0 +1,73 @@
tenant: Offsite Tenant {
  style.shadow: true

  accountant: Accountant
  security: Security
  janitor: Janitor
}

tenant.accountant -> Quickbooks

auditors: Auditors

auditors -> Quickbooks
auditors -> IRS
Quickbooks -> Clients

dataroom: E335 Data Room {
  style.shadow: true
  style.fill: "#e9edef"
  style.stroke-width: 2

  whitelist: Whitelisted Server {
    shape: rectangle
    style.stroke-dash: 10
    style.stroke: "#000E3D"

    routing: Routing blocker
    headers: Add header tokens
  }

  platform: Microsoft Windows Fleet {
    style.stroke-dash: 4
    style.stroke: "#000E3D"
    build
  }
  whitelist <-> platform: Maintain connection {style.stroke-width: 4}
}

auditors -> dataroom.platform.build
auditors -> dataroom.whitelist.routing
tenant.accountant -> dataroom

logging: Web based logger

production: Data for prod {
  style.shadow: true

  network: Network availability {
    style.stroke-dash: 10
    style.stroke: "#000E3D"

    instances: App instances {
      style.multiple: true
    }

    db: App DBs {
      shape: cylinder
    }

    secrets: AWS Secrets

    containers: Docker containers

    instances -> db
    instances -> secrets
    containers -> db
    containers -> secrets
  }
}

dataroom.whitelist.routing -> production.network.instances
dataroom.whitelist.headers -> production.network.instances
production.network.instances -> backups

docs/d2/sample_6.d2 (49 lines, new file)

@@ -0,0 +1,49 @@
ibm: IBM "Espresso" CPU {
  core0: IBM PowerPC "Broadway" Core 0
  core1: IBM PowerPC "Broadway" Core 1
  core2: IBM PowerPC "Broadway" Core 2

  rom: 16 KB ROM

  core0 -- core2

  rom -> core2
}

amd: AMD "Latte" GPU {
  mem: Memory & I/O Bridge
  dram: DRAM Controller
  edram: 32 MB EDRAM "MEM1"
  rom: 512 B SEEPROM

  sata: SATA IF
  exi: EXI (Efficient XML Interchange)

  gx: GX {
    3 MB 1T-SRAM
  }

  radeon: AMD Radeon R7xx "GX2"

  mem -- gx
  mem -- radeon

  rom -- mem

  mem -- sata
  mem -- exi

  dram -- sata
  dram -- exi
}

ddr3: 2 GB DDR3 RAM "MEM2"

amd.mem -- ddr3
amd.dram -- ddr3
amd.edram -- ddr3

ibm.core1 -- amd.mem

amd.exi -- RTC
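
As an aside, any of these samples can be previewed locally while editing the `.d2` source; a rough sketch, assuming `d2` and the TALA plugin are installed and the paths above exist in your checkout:

```sh
# Watch mode re-renders the SVG on every save of the .d2 source.
D2_LAYOUT=tala d2 --watch ./docs/d2/sample_6.d2 ./docs/assets/sample_6.svg
```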