~patmaddox

Check-in Differences
Login

Many hyperlinks are disabled.
Use anonymous login to enable hyperlinks.

Difference From trunk To wip

2024-05-07
09:55
patmaddox.com: haproxy.conf Leaf check-in: f647c630fa user: patmaddox tags: trunk, infra, site/patmaddox.com
09:54
patmaddox.com: Makefile to update certs check-in: 414ae8d0c9 user: patmaddox tags: trunk, infra, site/patmaddox.com
2024-04-19
03:47
merge trunk Closed-Leaf check-in: aa35b6ca50 user: patmaddox tags: wip
2024-04-18
13:34
infra: gulliver only needs specific graphics packages check-in: 3e682e44ae user: patmaddox
2024-04-17
18:10
merge main - move infra to top level check-in: 1e731f98ad user: patmaddox tags: wip

Added .cshrc.























































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
# $FreeBSD$
#
# .cshrc - csh resource script, read at beginning of execution by each shell
#
# see also csh(1), environ(7).
# more examples available at /usr/share/examples/csh/
#

alias h		history 25
alias j		jobs -l
alias la	ls -aF
alias lf	ls -FA
alias ll	ls -lAF

# These are normally set through /etc/login.conf.  You may override them here
# if wanted.
# set path = (/sbin /bin /usr/sbin /usr/bin /usr/local/sbin /usr/local/bin $HOME/bin)
# A righteous umask
# umask 22

setenv	EDITOR	vi
setenv	PAGER	less

if ($?prompt) then
	# An interactive shell -- set some stuff up
	set prompt = "%N@%m:%~ %# "
	set promptchars = "%#"

	set filec
	set history = 1000
	set savehist = (1000 merge)
	set autolist = ambiguous
	# Use history to aid expansion
	set autoexpand
	set autorehash
	set mail = (/var/mail/$USER)
	if ( $?tcsh ) then
		bindkey "^W" backward-delete-word
		bindkey -k up history-search-backward
		bindkey -k down history-search-forward
	endif

endif

Added .dbt/profiles.yml.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
duckdb-small:
  outputs:
    dev:
      path: /home/patmaddox/ratio/git/snowtest-gendata/gendata/db/small/source_data.duckdb
      type: duckdb
  target: dev

dbt_test:
  target: dev
  outputs:
    dev:
      type: duckdb

Changes to .emacs.d/init.el.

30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
	    (define-key markdown-mode-map (kbd "M-<up>") 'markdown-move-up)
	    (define-key markdown-mode-map (kbd "M-<down>") 'markdown-move-down)))

(message "init.el: UI settings")
(menu-bar-mode -1)
(toggle-scroll-bar -1)
(tool-bar-mode -1)
(if (member (system-name) '("gulliver"))
    (add-to-list 'default-frame-alist
		 '(font . "Monospace-18")))
(add-to-list 'default-frame-alist
	     '(fullscreen . maximized))

;; projectile
;(straight-use-package 'projectile)
;(define-key projectile-mode-map (kbd "C-c p") 'projectile-command-map)
;(projectile-mode +1)







|

|







30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
	    (define-key markdown-mode-map (kbd "M-<up>") 'markdown-move-up)
	    (define-key markdown-mode-map (kbd "M-<down>") 'markdown-move-down)))

(message "init.el: UI settings")
(menu-bar-mode -1)
(toggle-scroll-bar -1)
(tool-bar-mode -1)
;(if (not (member (system-name) '("gulliver")))
    (add-to-list 'default-frame-alist
		 '(font . "Monospace-14"));)
(add-to-list 'default-frame-alist
	     '(fullscreen . maximized))

;; projectile
;(straight-use-package 'projectile)
;(define-key projectile-mode-map (kbd "C-c p") 'projectile-command-map)
;(projectile-mode +1)

Changes to .gitignore.

1




























*~





























>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
*~
.sqlite_history
.vscode-oss
/.ICEauthority
/.Xauthority
/.cache
/.config
/.dbus
/.emacs.d/eshell/history
/.emacs.d/projectile-bookmarks.eld
/.emacs.d/projectile.cache
/.emacs.d/straight/build
/.emacs.d/straight/build-cache.el
/.emacs.d/straight/repos
/.emacs.d/tramp
/.emacs.d/transient
/.hex
/.kyua
/.lesshst
/.local
/.mix
/.mozilla
/.serverauth.*
/.ssh
/Downloads
/oss
/priv
/ratio
trees

Changes to .shrc.

1
2
3
4
5
6
7
8
9



10
11
12
13
14
15
16
17
18
19
20

21
22
23
24

25
26
27
28
29
30
31
# $FreeBSD$
#
# .shrc - bourne shell startup file 
#
# This file will be used if the shell is invoked for interactive use and
# the environment variable ENV is set to this file.
#
# see also sh(1), environ(7).
#



# file permissions: rwxr-xr-x
#
# umask	022

# Uncomment this to enable the builtin vi(1) command line editor in sh(1),
# e.g. ESC to go into visual mode.
# set -o vi

# some useful aliases
alias f=fossil
alias h='fc -l'

alias m="$PAGER"
alias ll='ls -laFo'
alias l='ls -l'
alias g='egrep -i'


# # be paranoid
# alias cp='cp -ip'
# alias mv='mv -i'
# alias rm='rm -i'

# # csh like history on arrow up and down









>
>
>











>




>







1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# $FreeBSD$
#
# .shrc - bourne shell startup file 
#
# This file will be used if the shell is invoked for interactive use and
# the environment variable ENV is set to this file.
#
# see also sh(1), environ(7).
#
export FOO=foo


# file permissions: rwxr-xr-x
#
# umask	022

# Uncomment this to enable the builtin vi(1) command line editor in sh(1),
# e.g. ESC to go into visual mode.
# set -o vi

# some useful aliases
alias f=fossil
alias h='fc -l'
alias j='just -f ~/Justfile'
alias m="$PAGER"
alias ll='ls -laFo'
alias l='ls -l'
alias g='egrep -i'
alias t='xfce4-terminal --tab'

# # be paranoid
# alias cp='cp -ip'
# alias mv='mv -i'
# alias rm='rm -i'

# # csh like history on arrow up and down
44
45
46
47
48
49
50

















# set prompt: ``username@hostname:directory $ '' 
PS1="\u@\h:\w \\$ "

# search path for cd(1)
# CDPATH=:$HOME

export SSH_AUTH_SOCK=$HOME/.ssh/auth-sock
























>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
# set prompt: ``username@hostname:directory $ '' 
PS1="\u@\h:\w \\$ "

# search path for cd(1)
# CDPATH=:$HOME

export SSH_AUTH_SOCK=$HOME/.ssh/auth-sock

# fossil change branch
fb() {
    if [ -z $1 ]; then
	fossil branch
    else
	branchdir=~/_patmaddox/$1
	if [ ! -f $branchdir/.fslckout ]; then
	    mkdir -p $branchdir
	    fossil open ~/fossils/patmaddox.com.fossil --nested --workdir $branchdir > /dev/null 2>&1
	fi
	cd $branchdir
	if fossil branch list | grep "^${1}$"; then
	    fossil up $1
	fi
    fi
}

Added .templates/bsd-prog/Makefile.in.













>
>
>
>
>
>
1
2
3
4
5
6
PROG=		%%PROG%%
CFLAGS+=	-Wall -Werror -Wextra -pedantic
MK_MAN=		no
#LDADD=		-lm

.include <bsd.prog.mk>

Added .templates/bsd-prog/bsd-prog.mk.





















>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
TEMPLATEDIR:=	${.PARSEDIR}

.PHONY: all

.ifndef PROG
.error must assign PROG var
.endif

all:
	sed -e 's|%%PROG%%|${PROG}|g' ${TEMPLATEDIR}/Makefile.in > Makefile

Changes to .xinitrc.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
#. /usr/local/etc/xdg/xfce4/xinitrc

# find device: xinput
# find properties: xinput list-props $id

case $(hostname -s) in
    gulliver)
	touchpad='SynPS/2 Synaptics TouchPad'
	# natural scrolling
	xinput --set-prop "${touchpad}" 'libinput Accel Speed' 0.6
	# acceleration
	xinput --set-prop "${touchpad}" 'libinput Natural Scrolling Enabled' 1

	mousesize=48
	;;
    *)
	mousesize=72
	cat <<EOF | xrdb -merge
Xft.dpi: 168
EOF
	;;
esac

	cat <<EOF | xrdb -merge
Xcursor.theme: Adwaita
Xcursor.size: ${mousesize}
EOF


setxkbmap -option ctrl:nocaps

exec i3
#. /usr/local/etc/xdg/xfce4/xinitrc







|

|

|













|







1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
#. /usr/local/etc/xdg/xfce4/xinitrc

# find device: xinput
# find properties: xinput list-props $id

case $(hostname -s) in
    gulliver)
	touchpad=11
	# natural scrolling
	xinput --set-prop $touchpad 291 0.6 # Accel Speed
	# acceleration
	xinput --set-prop $touchpad 280 1   # Natural Scrolling Enabled

	mousesize=48
	;;
    *)
	mousesize=72
	cat <<EOF | xrdb -merge
Xft.dpi: 168
EOF
	;;
esac

	cat <<EOF | xrdb -merge
Xcursor.theme: Adwaita
Xcursor.size: $mousesize
EOF


setxkbmap -option ctrl:nocaps

exec i3
#. /usr/local/etc/xdg/xfce4/xinitrc

Added Justfile.







































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
help:
  @just --list

# export fossil wiki page for the current branch
[no-cd]
export-wiki:
  #!/bin/sh
  branch=$(fossil branch current)
  wikifile=wiki-branch-${branch}.md
  if [ ! -f $wikifile ]; then
    fossil sync
    fossil wiki export branch/$(fossil branch current) 2>/dev/null | sed 's/\r//g' > wiki-branch-$(fossil branch current).md
  else
    echo "$wikifile already exists" 2>&1
    exit 1
  fi

# commit fossil wiki page for the current branch
[no-cd]
commit-wiki:
  fossil wiki commit -M markdown branch/$(fossil branch current) wiki-branch-$(fossil branch current).md || fossil wiki create -M markdown branch/$(fossil branch current) wiki-branch-$(fossil branch current).md
  fossil sync

@status:
  fossil all changes
  fossil all extras

[no-cd]
new-bsd-prog name:
  #!/bin/sh
  set -e
  PROG={{name}} make -f {{justfile_directory()}}/.templates/bsd-prog/bsd-prog.mk

fossil-ui:
  fossil ui --ckout-alias trunk

Changes to LICENSE.



1
2


3


4
















Copyright (c) 2022, Pat Maddox, all rights reserved



Individual files or subdirectories of this repository may have a different


license.














>
>
|

>
>
|
>
>
|
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
BSD 2-Clause License

Copyright (c) 2022, Pat Maddox

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Added bin/derp_ips.





>
>
1
2
#!/bin/sh
fetch -o - https://login.tailscale.com/derpmap/default | fx '.Regions[].Nodes[].IPv4' | grep '"' | sed -e 's/"//g' -e 's/,//' -e 's/^[[:space:]]*//'

Added bin/doctor.



















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
#!/usr/local/bin/just --justfile

doctor := ". ~/justlib/doctor.sh && doctor"

check: zfs-data net pf tailscale

@help:
  just -l -f {{justfile()}}

@zfs-data:
  {{doctor}} zfs-data "zfs get -H mounted $HOME/data | grep '/patmaddox-data'$'\t''mounted'$'\t'yes" 'zfs mount $<ZSAFE>/patmaddox-data'

@net:
  {{doctor}} network 'ping -q -c 1 8.8.8.8' 'service netif restart'

@pf: pf-enabled pf-rules

@pf-enabled:
  {{doctor}} pf-enabled 'doas service pf status | grep -q "Status: Enabled"' 'service pf start'

@pf-rules:
  {{doctor}} pf-rules 'doas pfctl -s rules | grep -q "block drop in all"' 'pfctl -f /etc/pf.conf'

@tailscale: net
  {{doctor}} tailscale 'tailscale status' 'service tailscaled restart'

Deleted bin/fossil-filter-ids.

1
2
3
#!/bin/sh
grep -E -o '\[[[:alnum:]]{10}\]' \
    | sed -e 's/\[//' -e 's/\]//'
<
<
<






Deleted bin/fossil-retag.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#!/bin/sh
main()
{
    oldtag=${1:?$(usage)}; shift
    newtag=${1:?$(usage)}; shift

    for commit in $(fossil tag find -t ci ${oldtag} | fossil-filter-ids); do
	fossil tag cancel ${oldtag} ${commit}
	fossil tag add ${newtag} ${commit}
    done
}

usage(){
    echo "Usage: fossil-retag <old-tag> <new-tag>"
}

main "${@}"
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































Deleted bin/fossil-untagged.

1
2
3
4
5
#!/bin/sh
fossil timeline -t ci -n 1000000000 -b trunk -F '%h %b %t %c' \
    | grep -E -v '^[[:alnum:]]{10} trunk trunk init$' \
    | grep -E '^[[:alnum:]]{10} ' \
    | awk '$2 == $3'
<
<
<
<
<










Added bin/fp.









>
>
>
>
1
2
3
4
#!/bin/sh
pd=$(realpath $(pwd))
fd=$(fossil status | grep '^local-root:' | awk '{print $2}')
echo "$pd" | sed -e "s|$fd||"

Deleted bin/git-tidy.

1
2
3
4
5
6
7
8
9
10
11
#!/bin/sh
set -e

branches=$(git branch --merged \
	       | grep -v '^*' \
	       | grep -v 'main$' \
	       | sed -E -e 's/^\+//' -e 's/^[[:space:]]*//')
for b in ${branches}; do
    if [ -d b/${b} ]; then git worktree remove -f b/${b}; fi
    git branch -d ${b}
done
<
<
<
<
<
<
<
<
<
<
<






















Added bin/jectl.









































































































































































































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
#!/bin/sh
DATASET=$1
command=$2
shift 2

JROOT=$(zfs get -H mountpoint $DATASET | awk '{print $3}')
JAILENV_ROOT=$DATASET/ROOT

_check_jail_env_root()
{
    mountpoint=$(zfs get -H mountpoint $JAILENV_ROOT | awk '{print $3}')
    if [ $mountpoint != "none" ]; then
	echo $mountpoint
    fi
}

_check_jail_env_mountpoints()
{
    jailenvs=$(zfs get -H -r -t filesystem mountpoint $JAILENV_ROOT | grep -e "^${JAILENV_ROOT}/")

    echo "$jailenvs" | while read line; do
	mountpoint=$(echo $line | awk '{print $3}')
	jail_env=$(echo $line | awk '{print $1}')

	if [ $mountpoint != $JROOT ]; then
	    echo "$jail_env $mountpoint"
	fi
    done
}

_check_jail_env_canmounts()
{
    jailenvs=$(zfs get -H -r -t filesystem canmount $JAILENV_ROOT | grep -e "^${JAILENV_ROOT}/")

    echo "$jailenvs" | while read line; do
	canmount=$(echo $line | awk '{print $3}')
	jail_env=$(echo $line | awk '{print $1}')

	if [ $canmount != "noauto" ]; then
	    echo "$jail_env $canmount"
	fi
    done
}

_check_children()
{
    children=$(zfs get -H -r -t filesystem mountpoint $DATASET | grep -v $JAILENV_ROOT)

    echo "$children" | while read line; do
	mountpoint=$(echo $line | awk '{print $3}')
	childset=$(echo $line | awk '{print $1}')

	echo $mountpoint | grep -e "^${JROOT}/" > /dev/null
	starts_with=$?

	if [ $childset != $DATASET -a $starts_with -ne 0 -a $mountpoint != "none" ]; then
	    echo "$childset $mountpoint"
	fi
    done
}

_check_root()
{
    if [ $JROOT = "/" -o $JROOT = "none" ]; then
	return 1
    fi
    return 0
}

_check()
{
    _check_root
    if [ $? -ne 0 ]; then
	echo "Error: $DATASET mountpoint is $JROOT"
    fi

    jail_env_root=$(_check_jail_env_root)
    if [ "$jail_env_root" ]; then
	echo "Error: $JAILENV_ROOT has mountpoint=$jail_env_root, but should be none"
    fi

    jail_envs=$(_check_jail_env_mountpoints)
    if [ "$jail_envs" ]; then
	echo "$jail_envs" | while read line; do
	    jenv=$(echo $line | awk '{print $1}')
	    mountpoint=$(echo $line | awk '{print $2}')
	    echo "Error: $jenv has mountpoint=${mountpoint}, but it should match $DATASET ($JROOT)"
	done
    fi

    jail_envs=$(_check_jail_env_canmounts)
    if [ "$jail_envs" ]; then
	echo "$jail_envs" | while read line; do
	    jenv=$(echo $line | awk '{print $1}')
	    canmount=$(echo $line | awk '{print $2}')
	    echo "Error: $jenv has canmount=${canmount}, but it should be noauto"
	done
    fi

    children=$(_check_children)
    if [ "$children" ]; then
	echo "$children" | while read line; do
	    childset=$(echo $line | awk '{print $1}')
	    mountpoint=$(echo $line | awk '{print $2}')
	    echo "Error: $childset has mountpoint=${mountpoint}, but should be under $JROOT (or none)"
	done
    fi
}

_recover()
{
    _check_root
    if [ $? -ne 0 ]; then
	echo "Can't recover: $DATASET mountpoint is $JROOT"
	exit 1
    fi

    jail_env_root=$(_check_jail_env_root)
    if [ "$jail_env_root" ]; then
	zfs set mountpoint=none $JAILENV_ROOT
    fi

    jail_envs=$(_check_jail_env_mountpoints)
    if [ "$jail_envs" ]; then
	echo "$jail_envs" | while read line; do
	    jenv=$(echo $line | awk '{print $1}')
	    zfs set mountpoint=$JROOT $jenv
	done
    fi

    jail_envs=$(_check_jail_env_canmounts)
    if [ "$jail_envs" ]; then
	echo "$jail_envs" | while read line; do
	    jenv=$(echo $line | awk '{print $1}')
	    zfs set canmount=noauto $jenv
	done
    fi

    children=$(_check_children)
    if [ "$children" ]; then
	echo "$children" | while read line; do
	    childset=$(echo $line | awk '{print $1}')
	    zfs inherit mountpoint $childset
	done
    fi
}

_list()
{
    jailenvs=$(zfs list -H -t filesystem -r $JAILENV_ROOT | grep -e "^${JAILENV_ROOT}/" | awk '{print $1}' | sed -e "s|${JAILENV_ROOT}/||")
    echo "$jailenvs" | while read line; do
	active=$(zfs get -H jectl:active $JAILENV_ROOT/$line | awk '{print $3}')
	if [ "$active" = "on" ]; then
	   echo "$line *"
	else
	    echo $line
	fi
    done
}

_activate()
{
    dataset="$JAILENV_ROOT/$1"
    zfs list $dataset > /dev/null 2>&1
    if [ $? -ne 0 ]; then
	echo "Error locating $dataset"
	exit 1
    fi

    jailenvs=$(zfs list -H -t filesystem -r $JAILENV_ROOT | grep -e "^${JAILENV_ROOT}/" | awk '{print $1}' | sed -e "s|${JAILENV_ROOT}/||")
    echo "$jailenvs" | while read line; do
	zfs inherit jectl:active $JAILENV_ROOT/$line
    done
    zfs set jectl:active=on $JAILENV_ROOT/$1
}

_boot()
{
    active=$(zfs get -H -t filesystem -r jectl:active $JAILENV_ROOT | grep -e "^${JAILENV_ROOT}/" | grep -e "[[:space:]]on[[:space:]]" | awk '{print $1}')
    mounted=$(zfs mount | grep -e "[[:space:]]${JROOT}$" | awk '{print $1}')
    if [ "$mounted" -a "$active" = "$mounted" ]; then
    elif [ "$mounted" -a "$active" != "$mounted" ]; then
	zfs unmount $JROOT
	zfs mount $active
    else
	zfs mount $active
    fi
}

_usage()
{
    echo "Usage:"
    echo "  jectl jail-dataset activate je"
    echo "  jectl jail-dataset check|recover|list|boot"
}

case $command in
    check)
	_check
	;;

    recover)
	_recover
	;;
    list)
	_list
	;;
    activate)
	if [ $# -ne 1 ]; then
	    _usage
	    exit 1
	fi

	check_results=$(_check)
	if [ "$check_results" ]; then
	    echo "$check_results"
	    exit 1
	fi

	_activate $1
	;;
    boot)
	_boot
	;;
    *)
	_usage
	;;
esac

Added bin/jlfs.











































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
#!/bin/sh
set -e

jlfs_dist=/tmp/jlfs-dist

if [ ! "$1" ]; then
  echo "Usage: jlfs <name>"
  exit 1
fi

name=$1

if [ -d "/jails/${name}" ]; then
  exit 0
fi

jroot=`zfs mount | grep '[[:space:]]/jails$' | awk '{print $1}'`
zfs create ${jroot}/${name}
zfs create ${jroot}/${name}/tmp
zfs create -o canmount=off ${jroot}/${name}/usr
zfs create ${jroot}/${name}/usr/home
zfs create -o canmount=off ${jroot}/${name}/var
zfs create ${jroot}/${name}/var/log

myroot=`zfs mount | grep '[[:space:]]/$' | awk '{print $1}'`
mymountpoint=`zfs get -H -o value mountpoint $myroot`
# can't delegate a dataset if we're in a jail, so don't create one
if [ $mymountpoint = "/" ]; then
    zfs create -o jailed=on -o mountpoint=none ${jroot}/${name}/zdata
fi

if [ ! -f $jlfs_dist/base.txz ]; then
  mkdir -p $jlfs_dist
  fetch -o $jlfs_dist/base.txz https://download.freebsd.org/releases/amd64/13.1-RELEASE/base.txz
fi

tar -C /jails/${name} -xf $jlfs_dist/base.txz

Added bin/jlif.

































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
#!/bin/sh
set -e

if [ ! "$1" ]; then
  echo "Usage: jlif <name>"
  exit 1
fi

name=$1

aname=`ifconfig epair create up`
bname=`echo "$aname" | sed -e 's/a$/b/'`

ifconfig jails addm $aname
ifconfig $aname name ${name}a
ifconfig $bname name ${name}b

Added bin/nocaps.





>
>
1
2
#!/bin/sh
setxkbmap -option ctrl:nocaps

Added bin/snap_and_sync.sh.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
#!/bin/sh
set -e
export TZ=UTC
echo "BEG snap-and-sync: $(date)"
sudo sanoid --cron --quiet
sudo syncoid --quiet -r --no-rollback --no-privilege-elevation --no-sync-snap --sendoptions="w" --recvoptions="u" --sshkey=/usr/home/patmaddox/data/keys/ssh-nas-rsyncnet zdata/crypt/safe nas-user@de1194b.rsync.net:zsync/snaps/nas-safe
sudo syncoid --quiet -r --no-rollback --no-privilege-elevation --no-sync-snap --sendoptions="w" --recvoptions="u" --sshkey=/usr/home/patmaddox/data/keys/ssh-nas-rsyncnet zdata/crypt/snaps/beastie-safe nas-user@de1194b.rsync.net:zsync/snaps/beastie-safe
sudo syncoid --quiet -r --no-rollback --no-privilege-elevation --no-sync-snap --sendoptions="w" --recvoptions="u" --sshkey=/usr/home/patmaddox/data/keys/ssh-nas-rsyncnet zdata/crypt/snaps/gulliver-safe nas-user@de1194b.rsync.net:zsync/snaps/gulliver-safe
echo "END snap-and-sync: $(date)"

Added bin/tmpfossil.









































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
#!/bin/sh
set -e

if [ $# -eq 0 ]; then
    cat <<'EOF'
Usage:
  create: $(tmpfossil <suffix>)
  clean:  tmpfossil -c
EOF
    exit 1
fi

_tmpfossil()
{
    work=$(mktemp -d -t tmpfossil-${suffix})
    cd $work
    fossil new fossil.db
    sqlite3 fossil.db "UPDATE user SET pw='password' WHERE login='$(whoami)'"
    fossil open fossil.db
}

if [ "$1" = "-c" ]; then
    rm -rf /tmp/tmpfossil-*
else
    suffix=$1
    work=


    _tmpfossil > /dev/null 2>&1

    if [ -n "$work" ]; then
	echo "cd $work"
    else
	echo echo error creating tmp fossil
    fi
fi

Added bin/update-patmaddox.com.













>
>
>
>
>
>
1
2
3
4
5
6
#!/bin/sh
set -e
cat ~/Downloads/www_patmaddox_com.pem ~/Downloads/www_patmaddox_com.key > ~/Downloads/www_patmaddox_com.haproxy.pem
scp ~/Downloads/www_patmaddox_com.haproxy.pem root@patmaddox.com:
ssh root@patmaddox.com "service haproxy reload"
rm ~/Downloads/www_patmaddox_com.pem ~/Downloads/www_patmaddox_com.key ~/Downloads/www_patmaddox_com.haproxy.pem

Added dist/pubkeys.





>
>
1
2
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM5W1fmVtiqaU1b/ghN9WmQv4fUgaVu6nZp56Cihhp3P patmaddox@beastie
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJsP0Iutc3/5X4vu6DQl26aWAgKdKJwz7fQ1waTW+iP7 patmaddox@gulliver

Added drafts/20231216-c-fork-pipe/c-forksock/Makefile.











>
>
>
>
>
1
2
3
4
5
PROG=		forksock
CFLAGS+=	-Wall -Werror -Wextra -pedantic
MK_MAN=		no

.include <bsd.prog.mk>

Added drafts/20231216-c-fork-pipe/c-forksock/forksock.c.































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <unistd.h>

#define BACKLOG 5
#define BUFFER_SIZE 100
#define PING "PING!"
#define PONG "PONG!"
#define SLEEP 1

void client(int s);
void server(int s);

int main(int argc, char *argv[])
{
  enum { NONE, PIPE, SOCKPAIR } transport = NONE;
  int opt;
  int socks[2];

  while ((opt = getopt(argc, argv, "ps")) != -1) {
    switch (opt) {
    case 'p': transport = PIPE; break;
    case 's': transport = SOCKPAIR; break;
    }
  }

  switch (transport) {
  case PIPE:
    if (pipe(socks) == -1) {
      perror("main: unable to pipe(2)");
      exit(1);
    }
    break;
  case SOCKPAIR:
    if (socketpair(PF_UNIX, SOCK_STREAM, 0, socks) == -1) {
      perror("main: unable to socketpair(2)");
      exit(1);
    }
    break;
  default:
    printf("error: must pass -p (pipe) or -s (socketpair)\n");
    exit(1);
  }

  int forkres = fork();
  if (forkres == -1) {
    perror("main: unable to fork(2)");
    exit(1);
  }

  if (forkres == 0) {
    server(socks[0]);
  } else {
    client(socks[1]);
  }
}

void server(int s) {
  pid_t pid = getpid();
  char buffer[BUFFER_SIZE];

  FILE *client = fdopen(s, "w+");
  if (!client) {
    perror("server: unable to fdopen(3)");
    exit(1);
  }

  while (fgets(buffer, BUFFER_SIZE, client)) {
    fprintf(client, "(%i) %s\n", pid, PONG);
  }
}

void client(int s) {
  pid_t pid = getpid();
  char buffer[BUFFER_SIZE];

  FILE *server = fdopen(s, "w+");

  if (!server) {
    perror("client: unable to fdopen(3)");
    exit(1);
  }

  while(1) {
    printf("(%i) %s\n", pid, PING);
    fprintf(server, "%s\n", PING);

    fgets(buffer, BUFFER_SIZE, server);
    printf("%s", buffer);

    sleep(SLEEP);
  }
}

Added drafts/20231216-c-fork-pipe/c-nng-uds/Makefile.











>
>
>
>
>
1
2
3
4
5
PROG=		c-nng-uds
CFLAGS+=	-Wall -Werror -Wextra -pedantic -I/usr/local/include -L/usr/local/lib -lnng
MK_MAN=		no

.include <bsd.prog.mk>

Added drafts/20231216-c-fork-pipe/c-nng-uds/c-nng-uds.c.



































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
#include <nng/nng.h>
#include <nng/protocol/reqrep0/rep.h>
#include <nng/protocol/reqrep0/req.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

#define SOCKFILE "/tmp/c-nng-uds.sock"
#define SOCKURL "ipc:///tmp/c-nng-uds.sock"
#define PING "PING!"
#define PONG "PONG!"
#define SLEEP 1

void server();
void client();

int main()
{
  int forkres;

  if ((forkres = fork()) == -1) {
    perror("main: unable to fork(2)");
    exit(1);
  } else if (forkres == 0) {
    client();
  } else {
    server();
  }
}

void server() {
  nng_socket sock;
  nng_listener listener;
  char *rbuf = NULL;
  char *sbuf = NULL;
  size_t sz;

  sbuf = nng_alloc(sizeof(PONG));
  sprintf(sbuf, "%s", PONG);

  if (nng_rep_open(&sock) != 0) {
    perror("unable to nng_rep_open(3)");
    exit(1);
  }

  if (nng_listener_create(&listener, sock, SOCKURL) != 0) {
    perror("unable to nng_listener_create(3)");
    exit(1);
  }

  if (nng_listener_start(listener, 0) != 0) {
    perror("unable to nng_listener_start(3)");
    exit(1);
  }
  printf("listening on %s\n", SOCKFILE);

  while (true) {
    if (nng_recv(sock, &rbuf, &sz, NNG_FLAG_ALLOC) != 0) {
      perror("unable to nng_recv(3)");
      exit(1);
    }
    nng_free(rbuf, sz);
    rbuf = NULL;

    if (nng_send(sock, sbuf, sizeof(PONG), NNG_FLAG_ALLOC) != 0) {
      perror("unable to nng_send(3)");
      exit(1);
    }
  }
}

/*
 * REQ client: dial SOCKURL, then loop forever sending PING and printing
 * the server's reply, pausing SLEEP seconds between rounds.  Exits the
 * process on any nng failure.
 * NOTE(review): nng functions return their own error codes and do not
 * set errno, so perror's message text here may be misleading — consider
 * nng_strerror() instead.
 */
void client() {
  nng_socket sock;
  nng_dialer dialer;
  size_t sz;
  char *sbuf = NULL;
  char *rbuf = NULL;

  printf("%s\n", PING);
  /* request payload is built once and reused for every send */
  sbuf = nng_alloc(sizeof(PING));
  sprintf(sbuf, "%s", PING);

  if (nng_req_open(&sock) != 0) {
    perror("unable to nng_req_open(3)");
    exit(1);
  }

  if (nng_dialer_create(&dialer, sock, SOCKURL) != 0) {
    perror("unable to nng_dialer_create(3)");
    exit(1);
  }

  if (nng_dialer_start(dialer, 0) != 0) {
    perror("unable to nng_dialer_start(3)");
    exit(1);
  }

  while(true) {
    /* flag 0: nng copies sbuf, so it stays reusable across iterations */
    if (nng_send(sock, sbuf, sizeof(PING), 0) != 0) {
      perror("unable to nng_send(3)");
      exit(1);
    }

    if (nng_recv(sock, &rbuf, &sz, NNG_FLAG_ALLOC) != 0) {
      perror("unable to nng_recv(3)");
      exit(1);
    }

    printf("%s\n", rbuf);

    /* BUG FIX: rbuf is allocated by nng on every receive (NNG_FLAG_ALLOC)
     * and was never released — one buffer leaked per iteration. */
    nng_free(rbuf, sz);
    rbuf = NULL;

    sleep(SLEEP);
  }
}

Added drafts/20231216-c-fork-pipe/index.md.





>
>
1
2
# start a c process, fork it, pipe data between processes

Added drafts/DO-NOT-EDIT.txt.





>
>
1
2
I'm not committing drafts to trunk anymore.
Drafts get their own branch.

Added drafts/acme-lets-encrypt-manual-dns.md.













>
>
>
>
>
>
1
2
3
4
5
6
# acme.sh - Let's Encrypt manual DNS verification

- `acme.sh --issue --server letsencrypt --dns -d 'example.com' -d '*.example.com' --yes-I-know-dns-manual-mode-enough-go-ahead-please --log`
- add / update the TXT records
- `acme.sh --renew -d example.com --yes-I-know-dns-manual-mode-enough-go-ahead-please`
- `acme.sh --install-cert -d example.com --key-file example.com.key.pem --fullchain-file example.com.crt.pem`

Added drafts/backup.md.



















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
# What files should I restore from backup?

I have backed up a bunch of files - `/boot/loader.conf`, `/etc/`, and `/usr/local/etc/`.
Now I want to restore them.
How do I determine which files I need to restore?

The simple answer is just to restore everything.
If the file hasn't changed, it's a no-op.
If the file has changed, I have the new one.

Still, sometimes I'd like to know which files changed.
Maybe I want to merge a config file instead of overwrite it.
Maybe I just want to see what makes this particular system unique.
`rsync(1)` makes it easy:

`rsync -n -a --out-format="%n" from/ to/`

That prints the list of files that I need to restore.

# notes

- zfs datasets: one per host, e.g. `zdata/crypt/backups/luckbox`
- rsync a known file list to the backup dataset
- snapshot it
- syncoid to a remote server

Added drafts/bare-bones-freebsd-jails.md.





























>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
# bare-bones freebsd jails

## binary jails

- just a binary, and supporting files (configs, libs, etc)
- not a whole lot of use for these - proof of concept, and isolate something potentially dangerous (e.g. you don't trust dependencies)

## service jails

- binary jails with networking

## vnet jails

- binary jails with vnet networking

Added drafts/build-personal-freebsd-ports-tree-with-poudriere.md.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
# Build a personal FreeBSD ports tree with poudriere

- why: because you want to run software that isn't in the freebsd ports tree
- nobody has ported it yet
- someone has submitted a patch, but it hasn't been merged
- a ports maintainer disagrees with a proposed change
- private software

- configure poudriere
- fork the ports tree
- build
- configure machine to use custom built ports tree

- tips
- make a category for your own stuff
- use -b latest
- reset main to match latest pkg-status build
- keep a single "all my ports" tree and reconstruct it from other branches
- keep all new ports in their own branch - rebase dependencies
- script it

Added drafts/cli-testing-with-kyua-and-atf.md.



























































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
# CLI testing with Kyua and ATF

`pkg install atf`

## Running tests

`kyua test`

If a test fails:

`kyua debug test_suite.sh:test_case`

## Defining tests

Simple `Kyuafile`:

```
syntax(2)

test_suite('my_suite')

atf_test_program{name='test_program.sh'}
```

Simple `test_program.sh`:

```
#! /usr/bin/env atf-sh
set -e

## test cases
atf_init_test_cases() {
    atf_add_test_case first_test
}

## empty
atf_test_case first_test

first_test_body() {
    atf_check -s exit:0 -o empty true
    atf_check -s exit:1 -o empty false
	atf_check -s exit:0 -o inline:"hello world" echo "hello world"
}
```

## `$PATH` in kyua / atf

kyua tests will often need to call a specific program (e.g. a script, or output from a build system).
Instead of managing the path, refer to the exact binary using `$(atf_get_srcdir)`:

```
mycmd=$(atf_get_srcdir)/../mycmd

# ...

mytest_body() {
  atf_check -s exit:0 $mycmd
}
```

This ensures that the Kyua tests are self-contained, and can be constructed into a tree using `include`.

Added drafts/ffi.md.

















































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
# Comparing FFI in different languages

- goals
  - call FreeBSD C libraries
  - build command line tools
  - write automated tests for operating system libraries
- criteria
  - FFI ergonomics
  - general language ergonomics / toolchain
- noteworthy, but non-criteria
  - performance
  - shared memory access
  - calling other language from C
  - memory control
- preference
  - languages that compile to single binary
- test cases
  - hello world - pass in an arg
  - mutate from C - upcase
  - intentional memory leak
  - concurrency
  - libucl
- notes
  - all pretty much the same
    - most have you write some kind of header definition
    - sometimes you have to write a binding function also
  - strings are a pain
    - C strings have no length, are null-terminated
    - newer languages have length, not null-terminated
	- newer languages often use UTF encoded strings, C strings are a sequence of bytes with no specified encoding
    - almost always result in two memory copies - convert language string to C string, call C, convert C string back to language
	- can also create null pointer in calling language, and let C initialize it (sometimes)
  - automatic header import vs explicit bindings
    - Go, Zig, and C++ can use C headers directly, without needing to define bindings
	- D, Nim, and Rust need to explicitly define bindings
    - automatic header imports is very useful, especially as you have to reference more stuff
	- structs seem like a PITA with languages requiring bindings
	- explicit bindings sucks - you get into definition dependencies. Nim has C2Nim, but on ucl.h it used 30GB of RAM and then got OOM killed.
  - C++ calls C functions directly, so the library calling doesn't change at all
    - Supposedly there's a potential issue if the C header includes other headers - they end up including the C++ header with the same name.

Added drafts/fossil-code-management.md.































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
# personal code management with fossil

- it's a lot of fun
- per-branch wiki page for documentation / todo list
  - can also do it for commits or tags
- workflow:
  - work on stuff in a branch
  - when it's ready, merge
  - this produces a super clean timeline
  - all you see is the merges, and the diff
  - you can drill down to the full merge commit history if you want
- can edit commit messages after the fact
  - really nice, because sometimes I want to reword / add detail
- super helpful forum
  - right-sized community (general feeling I've had as I've been involved in things over the years)

Added drafts/git-merge-squash/Makefile.



























































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
# Demo: compare regular merges against squash merges of the same branches.
BUILDDIR?=	_build
GIT=		git -C ${BUILDDIR}

# all and clean name actions, not files — mark them phony so a stray
# file with the same name cannot mask them.
.PHONY: all clean

all:
	mkdir ${BUILDDIR}
	${GIT} init
	${GIT} commit --allow-empty -m init

	${GIT} checkout -b b1
	echo 'f1c1' > ${BUILDDIR}/file1
	${GIT} add file1
	${GIT} commit -m 'f1c1'
	echo 'f1c2' >> ${BUILDDIR}/file1
	${GIT} commit -am 'f1c2'

	${GIT} checkout -b b2 main
	echo 'f2c1' > ${BUILDDIR}/file2
	${GIT} add file2
	${GIT} commit -m 'f2c1'
	echo 'f2c2' >> ${BUILDDIR}/file2
	${GIT} commit -am 'f2c2'

	${GIT} checkout -b b1-squash main
	${GIT} merge --squash b1
	${GIT} commit -m 'squash b1'
	${GIT} checkout -b b2-squash main
	${GIT} merge --squash b2
	${GIT} commit -m 'squash b2'

	${GIT} checkout main
	echo 'f3c1' > ${BUILDDIR}/file3
	${GIT} add file3
	${GIT} commit -m 'f3c1'
	${GIT} merge --no-ff --no-edit b1
	echo 'f3c2' >> ${BUILDDIR}/file3
	${GIT} commit -am 'f3c2'
	${GIT} merge --no-ff --no-edit b2
	echo 'f3c3' >> ${BUILDDIR}/file3
	${GIT} commit -am 'f3c3'
	${GIT} merge --no-ff --no-edit b1-squash
	${GIT} merge --no-ff --no-edit b2-squash
	${GIT} log --first-parent | cat

clean:
	rm -rf ${BUILDDIR}

Added drafts/git-merge-squash/index.md.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
# git merge and squash

Maintaining a fork means merging.
You don't control how upstream handles merges.
FreeBSD effectively squashes commits that you may have merged.

Create a repo.
Commit to two different branches.
Merge the branches as-is.
Then squash merge them.

Interestingly the right side commits appear out of order if I do a `git log`.
I need to `git log --first-parent` to make the commits show up in the expected order.

Added drafts/git-subtree-projects/Makefile.









































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# Demo: manage a dependency inside a project with git-subtree, then push
# subtree changes back to the dependency's own repository.
BUILDDIR?=	_build
PROJDIR=	${BUILDDIR}/proj
DEPDIR=		${BUILDDIR}/dep

# all and clean name actions, not files — mark them phony so a stray
# file with the same name cannot mask them.
.PHONY: all clean

all:
	mkdir -p ${PROJDIR}
	git -C ${PROJDIR} init
	git -C ${PROJDIR} commit --allow-empty -m 'init proj'
	echo 'starting proj 1' > ${PROJDIR}/readme
	git -C ${PROJDIR} add readme
	git -C ${PROJDIR} commit -m 'starting proj 1'

	mkdir -p ${DEPDIR}
	git -C ${DEPDIR} init
	git -C ${DEPDIR} config receive.denyCurrentBranch ignore
	git -C ${DEPDIR} commit --allow-empty -m 'init dep'
	echo 'starting dep' > ${DEPDIR}/readme
	git -C ${DEPDIR} add readme
	git -C ${DEPDIR} commit -m 'starting dep'

	git -C ${PROJDIR} remote add -f dep-upstream ../dep
	git -C ${PROJDIR} subtree -P dep add dep-upstream/main

	echo 'more proj' >> ${PROJDIR}/readme
	git -C ${PROJDIR} commit -am 'more proj'

	echo 'more dep' >> ${PROJDIR}/dep/readme
	git -C ${PROJDIR} commit -am 'more dep'

	git -C ${PROJDIR} subtree -P dep push dep-upstream main

	git -C ${PROJDIR} log | cat
	git -C ${DEPDIR} log | cat

clean:
	rm -rf ${BUILDDIR}

Added drafts/git-subtree-projects/index.md.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
git subtree projects
from "maintaining a FreeBSD fork" talk

I don't think I like it.
It doesn't make it easy to use different branches of a dependency.
You have to completely delete and re-create the dependency.
On the other hand, it keeps the entire repo self-contained with no external git dependencies.

Added drafts/language-gauntlet.md.























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
# Language Gauntlet

I started working on Advent of Code 2023 for the first time.
I solved the first puzzle, started on the second one, and am not really feeling it.
I thought I would use it to explore different languages.
It turns out that I probably prefer something more direct and systematic.

When learning a new language, there are a few different aspects to focus on:

- syntax
- libraries
- tools
- idioms
- special features

My idea is a language gauntlet - a structured, progressive set of exercises to take me through as many of those aspects as possible.
I can write top-level tests, where the interface is a CLI application.
Then I can take any new language or technique through the gauntlet.

Probably I can take exercises from exercism, or certain books like _The C Programming Language_ as a starting point.
Perhaps I can have multiple gauntlets, pulled from different sources and standardized.

Even better: just use the FreeBSD test suite.
I don't know how well-tested it is, but there are a lot of tests.
They're straightforward, useful utilities.
I can implement new versions in any language, and use the tests to verify the implementation.
It's not about rewriting BSD in another language, but simply using the test suite to provide direction.

Added drafts/make-vs-shell-script.md.













>
>
>
>
>
>
1
2
3
4
5
6
shell script when you need to save state

makefile is all macros and doesn't have state
it expands to text at runtime

if you need to maintain state, with variables, use shell scripts

Added drafts/write-with-org-publish-with-markdown.md.







































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
# Write with org-mode, publish with markdown

I like writing with org-mode. It's more about the text. Structure is minimal. It's easy to move things around.

Markdown is nice, and almost as good. It still is about marking up text though.

org-mode can export to markdown. So I can write in org-mode, and publish with markdown.


# Ideas

Have a single org file with all of my writing (drafts.org) Export a particular heading to the markdown file when saving. Even better: `fossil diff` to determine which lines have changed, and export the heading above that line. Should be possible to do in parallel with make (although running emacs multiple times may be slower).

Hide the `begin_src` / `end_src` blocks:

```emacs-lisp
(setq-default prettify-symbols-alist '(("#+BEGIN_SRC" . "λ")))
(add-hook 'org-mode-hook 'prettify-symbols-mode)
```

Added drafts/write-with-org-publish-with-markdown.org.













































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
* Write with org-mode, publish with markdown
I like writing with org-mode.
It's more about the text.
Structure is minimal.
It's easy to move things around.

Markdown is nice, and almost as good.
It still is about marking up text though.

org-mode can export to markdown.
So I can write in org-mode, and publish with markdown.
* Ideas
Have a single org file with all of my writing (drafts.org)
Export a particular heading to the markdown file when saving.
Even better: `fossil diff` to determine which lines have changed, and export the heading above that line.
Should be possible to do in parallel with make (although running emacs multiple times may be slower).

Hide the begin_src / end_src blocks:
#+BEGIN_SRC emacs-lisp
  (setq-default prettify-symbols-alist '(("#+BEGIN_SRC" . "λ")))
  (add-hook 'org-mode-hook 'prettify-symbols-mode)
#+END_SRC

Added examples/c-jail/Makefile.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
# Build and run the jail(2) demo; doas is needed because jail(2)
# requires root.
# FIX: clean was missing from .PHONY.
.PHONY: run clean

run: _build/hello
	doas ./_build/hello

_build/hello: hello.c
	@mkdir -p _build
	cc -Wall -Werror -o ${.TARGET} ${.ALLSRC}

clean:
	rm -rf _build

Added examples/c-jail/hello.c.































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
#include <stdlib.h>
#include <unistd.h>
#include <sys/param.h>
#include <sys/types.h>
#include <sys/jail.h>
#include <stdio.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <errno.h>

/* Demo of the jail(2) syscall: start a jail rooted at /tmp/jroot with a
 * single IPv4 address, then idle forever.  Must run as root. */
int main() {
  struct in_addr addr;
  if(!inet_aton("192.168.3.81", &addr)) {
    exit(69);
  }

  /* positional init of struct jail: version, path, hostname, jailname,
   * ip4s (1 address), ip6s (0 addresses), ip4, ip6 */
  struct jail j = {
    JAIL_API_VERSION,
    "/tmp/jroot",
    "chicken",
    "chicken",
    1,
    0,
    &addr,
    NULL
  };

  /* NOTE(review): malloc and getcwd results are unchecked — fine for a
   * demo, not for production code. */
  char *path = malloc(sizeof(char) * 128);
  getcwd(path, 128);
  printf("path outside jail: %s\n", path);

  /* jail(2) returns the new jail id and moves this process inside it */
  int jid = jail(&j);
  if(jid >= 0) {
    printf("started jail jid: %i\n", jid);
  } else {
    printf("error: (%i)\n", errno);
    exit(1);
  }

  /* cwd is now relative to the jail's root */
  getcwd(path, 128);
  printf("path in jail: %s\n", path);

  /* keep the jail alive until the process is killed */
  while(1) {
    sleep(3);
  }
}

Added examples/fossil-commit-files/Makefile.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
# Run the fossil partial-commit demo in a throwaway ./work directory.
.PHONY: run clean

run: clean
	./run.sh

clean:
	rm -rf work

Added examples/fossil-commit-files/run.sh.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#!/bin/sh
# Demonstrate that `fossil commit FILE` commits only the named file:
# file2 stays in the ADDED state, shown by the final `fossil status`.
set -e

mkdir work
cd work

# fresh repository with a checkout opened inside ./work
fossil new test.fossil
fossil open --nested test.fossil

echo "new file1" > file1
fossil add file1

echo "new file2" > file2
fossil add file2

# commit only file1; file2 remains pending
fossil commit file1 -m "only commit file1"
fossil status

Added examples/git-rebase/Makefile.































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
# Drive the two rebase demos: `make bad` shows the failure after an
# amended published commit, `make good` shows the --skip recovery.
# FIX: bad and good were missing from .PHONY.
.PHONY: help clean bad good

help:
	@echo "targets:"
	@echo "  bad"
	@echo "  good"

bad: clean
	./bad.sh

good: clean
	./good.sh

clean:
	rm -rf work

Added examples/git-rebase/README.md.























































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
# git-rebase example

"Never modify a published branch" - here's why

After amending the commit on `main`, `branch1` still has the commit.
When it rebases from `main`, it pulls the new commit from `main`,
and also tries to keep the commit that was originally on `main`.

Original history:

```
main:
commit1

branch1:
commit2
commit1
```

desired history:

```
main:
amended1

branch1:
commit2
amended1
```

actual history:

```
branch1:
commit2
commit1
amended1
```

In this case, we need to skip the commit that originally came from `main` using
`git rebase --skip`.

Note: `git rebase --show-current-patch` shows the patch that it is trying to rebase.

Added examples/git-rebase/bad.sh.

















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
#!/bin/sh
# Demonstrate why you should never amend a published commit: branch1 was
# created on top of the original main commit, so after main is amended,
# rebasing branch1 replays the stale "first main commit" too.
# The final rebase is EXPECTED to fail (good.sh shows the recovery);
# with set -e the script then exits non-zero.
set -e

mkdir -p work/main

echo "=== first main commit"
git -C work/main init
git -C work/main commit -m "init" --allow-empty
echo "first main" > work/main/file1
git -C work/main add .
git -C work/main commit -m "first main commit"

echo "=== first branch1 commit"
git -C work/main worktree add ../branch1
echo "first branch1" > work/branch1/file2
git -C work/branch1 add .
git -C work/branch1 commit -m "first branch1 commit"

echo "=== amending main commit"
echo "amended main" >> work/main/file1
git -C work/main commit -a --amend -m "amended main commit"

echo "=== rebasing branch1 from main"
git -C work/branch1 rebase main

Added examples/git-rebase/good.sh.

















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
#!/bin/sh
# Same scenario as bad.sh (amended published commit), but recover from
# the rebase conflict with `git rebase --skip`, which drops the stale
# copy of the original main commit.
set -e

mkdir -p work/main

echo "=== first main commit"
git -C work/main init
git -C work/main commit -m "init" --allow-empty
echo "first main" > work/main/file1
git -C work/main add .
git -C work/main commit -m "first main commit"

echo "=== first branch1 commit"
git -C work/main worktree add ../branch1
echo "first branch1" > work/branch1/file2
git -C work/branch1 add .
git -C work/branch1 commit -m "first branch1 commit"

echo "=== amending main commit"
echo "amended main" >> work/main/file1
git -C work/main commit -a --amend -m "amended main commit"

echo "=== rebasing branch1 from main"
# the rebase fails on the stale commit; --skip drops it and continues
git -C work/branch1 rebase main || git -C work/branch1 rebase --skip

Added examples/jail.conf.







































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
path = "/jails/${name}";
vnet;
$iface = "$name";
vnet.interface = "${iface}b";
host.hostname = "$name";
exec.clean;
exec.start = "sh /etc/rc";
exec.stop = "sh /etc/rc.shutdown jail";
exec.release = "ifconfig ${iface}a destroy";
enforce_statfs = 1;
devfs_ruleset = 6;
allow.mount;
allow.mount.devfs;
mount.devfs;

pdr_nginx {
  $ip = "192.168.2.3/24";
}

jailtest {
  exec.prepare = "~patmaddox/bin/jectl zroot/JAILS/$name boot";
  exec.prepare += "~patmaddox/bin/jlprep $name $ip $iface";
  $ip = "192.168.2.4/24";
}

porttest {
#  exec.prepare = "~patmaddox/bin/jectl zroot/JAILS/$name boot";
  exec.prepare += "~patmaddox/bin/jlprep $name $ip $iface";
  $ip = "192.168.2.5/24";
}

nginx_example {
  exec.prepare += "~patmaddox/bin/jlprep $name $ip $iface";
  $ip = "192.168.2.6/24";
}

Added examples/pf.conf.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
ext_if = "em0"
jail_if = "jails"
jail_net = $jail_if:network
bhyve_if = "bhyves"
bhyve_net = $bhyve_if:network

set skip on lo
scrub in

nat on $ext_if from $jail_net -> ($ext_if:0)
nat on $ext_if from $bhyve_net -> ($ext_if:0)

pass out

block in

pass in proto tcp to port { 22 }
pass in inet proto icmp icmp-type { echoreq }
pass from $jail_net
pass on $bhyve_if from $bhyve_net

Added examples/phabricator-delete-flags.sh.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
#!/bin/sh
# Recursively delete all of my flags on phabricator.
# Usage: TOKEN=<api-token> ./phabricator-delete-flags.sh
# flag.query returns one page of flags per call, so delete_flags
# recurses until a query comes back empty.
token=${TOKEN:?}

delete_flags() {
    echo "about to delete some flags"
    # fx extracts the flag ids from the JSON response as one space-separated line
    fids=$(curl -s https://reviews.freebsd.org/api/flag.query -d api.token="$token" | fx 'x.result.map(o => o.id).join(" ")')

    if [ -n "$fids" ]; then
	for fid in $fids; do
	    # BUG FIX: this request previously used a hard-coded API token
	    # (a credential leaked into the script) instead of the caller's
	    # $token; expansions are now quoted as well.
	    curl -s https://reviews.freebsd.org/api/flag.delete -d api.token="$token" -d id="$fid" > /dev/null
	    echo "deleted $fid"
	done
	delete_flags
    fi
}

delete_flags

Added examples/pkg-create-example/.gitignore.









>
>
>
>
1
2
3
4
manifest
pkg-create-example-*.pkg
pkg-plist
stage

Added examples/pkg-create-example/Makefile.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
# Build a FreeBSD package from src/ using pkg-create(8): stage the files
# under stage${PREFIX}, generate the manifest and pkg-plist, then pack.
PKG = pkg-create-example-${VERSION}.pkg
VERSION= 1.0.0
PREFIX= /usr/local

.PHONY: all clean clean-stage

all: ${PKG}
clean: clean-stage
	rm -f ${PKG} pkg-plist manifest
clean-stage:
	rm -rf stage

# BSD make ${:! cmd !} runs a shell command at expansion time
SRC_FILES = ${:! find src -type f !}

${PKG}: clean-stage ${SRC_FILES}
	mkdir -p stage${PREFIX}
	cp -Rp src/ stage${PREFIX}/
	cat manifest.in | sed -e 's|%%VERSION%%|${VERSION}|' | sed -e 's|%%PREFIX%%|${PREFIX}|' > manifest
	echo ${:! find stage -type f !:C/^stage//} | tr ' ' '\n' > pkg-plist
	pkg create -M manifest -r stage -p pkg-plist

Added examples/pkg-create-example/manifest.in.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
name: pkg-create-example
version: %%VERSION%%
origin: devel/pkg-create-example
comment: Example demonstrating how to create a package using pkg-create(8)
www: https://github.com/patmaddox/home/tree/main/examples/pkg-create-example
maintainer: pat@patmaddox.com
prefix: %%PREFIX%%
desc: Example demonstrating how to create a package using pkg-create(8)

Added examples/pkg-create-example/src/share/pkg-create-example/bar.



>
1
hello bar

Added examples/pkg-create-example/src/share/pkg-create-example/foo.



>
1
hello foo

Added examples/pkg-create-example/src/share/pkg-create-example/subdir/baz.



>
1
hello baz

Added experiments/bhyve/Makefile.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
# Fetch FreeBSD distribution sets and create a new bhyve VM.
# Override VM and VERS on the command line; VERS=CURRENT switches to the
# snapshot download URL.
VM=	newhost
VERS=	13.2-RELEASE
DOWNLOAD= https://download.freebsd.org/releases/amd64/${VERS}

.if ${VERS} == CURRENT
DOWNLOAD= https://download.freebsd.org/snapshots/amd64/15.0-CURRENT
.endif

.PHONY: new-vm dist

dist: dist/${VERS}-base.txz dist/${VERS}-kernel.txz

dist/${VERS}-base.txz:
	fetch -o ${.TARGET} ${DOWNLOAD}/base.txz

dist/${VERS}-kernel.txz:
	fetch -o ${.TARGET} ${DOWNLOAD}/kernel.txz

new-vm: dist
	./bhyve.sh new ${VM}

Added experiments/bhyve/bhyve.sh.

























































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
#!/bin/sh
set -e

# to mount and access the disk on the host:
# mdconfig -a -t vnode -f guest.img
# zpool import -f -R /mnt/myguest -N 16143602688976745622 -t zguest
# zfs mount -a
# zpool export zguest
# mdconfig -d -u 0

# configure a new disk
# truncate -s 10G new.img
# mdconfig -a -t vnode -f new.img
# gpart create -s gpt /dev/md0
# gpart add -a 4k -t freebsd-zfs /dev/md0
# zpool create -m none -o altroot=/mnt/myguest -o autoexpand=on -O atime=off -t zguest zroot /dev/md0p1


# this relies on host networking and NAT
## /etc/rc.conf:
# ifconfig_bridge1_name="bhyves"
# ifconfig_bhyves="inet 192.168.3.1/24 up"
#
## /etc/pf.conf:
# bhyve_if = "bhyves"
# bhyve_net = $bhyve_if:network
# 
# set skip on lo
# scrub in
# 
# nat on $ext_if from $bhyve_net -> ($ext_if:0)
# 
# pass out
# 
# block in
# 
# pass in proto tcp to port { 22 }
# pass in inet proto icmp icmp-type { echoreq }
# pass on $bhyve_if from $bhyve_net
##

# Load the vmm(4) kernel module if it is not already loaded.
# FIX: use grep -q so the check is silent — previously the matched
# module name was printed and callers had to redirect stdout.
load_vmm()
{
    if ! kldstat | awk '{print $5}' | grep -q '^vmm.ko$'; then
	kldload vmm
    fi
}

# Create tap interface tap-$1 if missing and add it to the "bhyves"
# bridge.
# NOTE: assigns the global $iface as a side effect — boot() reads it to
# wire up the VM's virtio-net device.
_ifconfig()
{
    iface=tap-${1}
    if ! ifconfig $iface > /dev/null; then
	ifconfig tap create name $iface
    fi
    (ifconfig bhyves | grep "member: $iface" > /dev/null) || ifconfig bhyves addm $iface
}

# Print command usage to stdout and abort the script with status 1.
usage()
{
    printf '%s\n' \
	'Usage:' \
	'  ./bhyve.sh boot <vm>' \
	'  ./bhyve.sh new <vm>' \
	'  ./bhyve.sh mount <vm>' \
	'  ./bhyve.sh unmount <vm>'
    exit 1
}

# Boot VM $1 from vms/<vm>.img with bhyveload + bhyve on the console.
# Relies on _ifconfig having set the global $iface (tap-<vm>).
boot()
{
    vm=$1
    img=vms/${vm}.img
    if [ ! -f $img ]; then
	echo "error: $img not found"
	exit 1
    fi
    load_vmm > /dev/null
    _ifconfig $vm
    # stage the FreeBSD loader, then start the VM with 4 vCPUs / 4G RAM
    bhyveload -c stdio -m 4096M -d $img $vm
    bhyve -c 4 -m 4096M -s 0:0,hostbridge -s 1:0,lpc -s 2:0,virtio-net,${iface} -s 3:0,virtio-blk,${img} -H -A -P -l com1,stdio $vm
}

# Create a new 24G VM image at vms/$1.img with a ZFS root populated from
# the dist/ tarballs, apply base rc.conf/loader.conf settings, and
# detach it again via unmount().
new_vm()
{
    vm=$1
    img=vms/${vm}.img
    root=/mnt/bhyve-${vm}
    vers=${VERS:-13.2-RELEASE}
    base=dist/${vers}-base.txz
    kernel=dist/${vers}-kernel.txz

    if [ ! -f $base -o ! -f $kernel ]; then
	echo "error: make sure $base and $kernel are present"
	echo "suggestion: make dist"
	exit 1
    fi

    mkdir -p vms
    truncate -s 24G $img
    # BUG FIX: the md device was previously assumed to be md0; capture
    # the name mdconfig actually allocates (as bin/new-vm.sh does),
    # otherwise this destroys whatever is attached at md0.
    md=$(mdconfig -a -t vnode -f $img)
    gpart create -s gpt /dev/${md}
    gpart add -a 4k -t freebsd-zfs /dev/${md}
    # -t gives the pool a temporary name so it cannot clash with the
    # host's own "zroot"
    zpool create -m / -o altroot=${root} -o autoexpand=on -O atime=off -t bhyve-${vm} zroot /dev/${md}p1
    tar -C $root -xzf $base
    tar -C $root -xzf $kernel
    touch ${root}/etc/fstab
    sysrc -f ${root}/etc/rc.conf hostname="${vm}"
    sysrc -f ${root}/etc/rc.conf zfs_enable="YES"
    sysrc -f ${root}/boot/loader.conf zfs_load="YES"
    unmount $vm
}

# Detach VM $1's disk: export its zpool, destroy the backing md(4)
# device for vms/<vm>.img, and remove the mount point.
unmount()
{
    vm=$1
    zpool export bhyve-${vm}
    # mdconfig -l -f prints e.g. "md0"; strip the prefix to get the unit
    md=$(mdconfig -l -f vms/${vm}.img | sed -e 's/^md//')
    mdconfig -d -u $md
    rmdir /mnt/bhyve-${vm}
}

# Attach VM $1's disk image via md(4) and import its zpool under
# /mnt/bhyve-<vm>, renaming it (-t) to avoid clashing with the host's
# own "zroot".
_mount()
{
    vm=$1
    img=vms/${vm}.img
    if [ ! -f $img ]; then
	echo "error: $img not found"
	exit 1
    fi
    md=$(mdconfig -a -f $img)
    # scrape the numeric pool id out of `zpool import` so we can import by id
    zid=$(zpool import -d /dev/${md}p1 | grep '^[[:space:]]* id: [[:digit:]]*' | awk '{print $2}')
    zpool import -f -R /mnt/bhyve-${vm} -t $zid bhyve-${vm}
}

# Command dispatch: every subcommand requires a <vm> argument; anything
# else prints usage and exits 1.
case $1 in
    boot)
	if [ -z "$2" ]; then usage; fi
	boot $2
	;;
    new)
	if [ -z "$2" ]; then usage; fi
	new_vm $2
	;;
    mount)
	if [ -z "$2" ]; then usage; fi
	_mount $2
	;;
    unmount)
	if [ -z "$2" ]; then usage; fi
	unmount $2
	;;
    *)
	usage
	;;
esac

Added experiments/bhyve/bin/boot.sh.



















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
#!/bin/sh
# Boot VM $1 with a UEFI bootrom, restarting bhyve while it exits 0.
# NOTE(review): bhyve exit status 0 appears to be treated as "guest
# rebooted" here, with any other status ending the loop — confirm
# against bhyve(8)'s documented exit codes.

vm=$1
iface=tap-${vm}
img=vms/${vm}.img

if [ ! -f $img ]; then
    echo "error: $img not found"
    exit 1
fi

bhyve_exit=0

while [ $bhyve_exit -eq 0 ]; do
    bhyve -c 4 -m 4096M \
	  -s 0:0,hostbridge \
	  -s 1:0,lpc \
	  -s 2:0,virtio-net,${iface} \
	  -s 3:0,virtio-blk,${img} \
	  -H -A -P \
	  -l com1,stdio \
	  -l bootrom,/usr/local/share/uefi-firmware/BHYVE_UEFI.fd \
	  $vm
    bhyve_exit=$?
done

Added experiments/bhyve/bin/bridge.sh.











>
>
>
>
>
1
2
3
4
5
#!/bin/sh
# Create bridge interface $1 with address $2 if it does not exist yet.
# NOTE(review): `ifconfig $1` prints an error to stderr when the
# interface is missing — redirect 2>/dev/null if that noise matters.
set -e
if ! ifconfig $1 > /dev/null ; then
    ifconfig bridge create inet $2 name $1 up
fi

Added experiments/bhyve/bin/config.sh.

































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
#!/bin/sh
# Apply <vm>-rc.conf to a mounted guest at /mnt/bhyve-<vm>: each line of
# the local rc file is turned into a `sysrc -f <guest rc.conf>` call.

rc=${1}-rc.conf
root=/mnt/bhyve-${1}
vm_rc=${root}/etc/rc.conf

if [ ! -f $rc ]; then
   echo "error: missing $rc"
   exit 1
fi

sysrc -f ${root}/boot/loader.conf zfs_load="YES"

sysrc -f ${vm_rc} zfs_enable="YES"
# prefix every rc line with the sysrc invocation, then run them all.
# NOTE(review): the rc file is eval'd verbatim — only feed it trusted
# config files.
commands=$(sed -e "s|^|sysrc -f $vm_rc |" $rc)
eval "$commands"

Added experiments/bhyve/bin/mount.sh.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
#!/bin/sh
# Attach vms/$1.img via md(4) and import its zpool under /mnt/bhyve-<vm>,
# renaming it (-t) to avoid clashing with the host's own "zroot".

vm=$1
img=vms/${vm}.img

if [ ! -f $img ]; then
    echo "error: $img not found"
    exit 1
fi

md=$(mdconfig -a -f $img)
# scrape the numeric pool id out of `zpool import` so we can import by id
zid=$(zpool import -d /dev/${md}p1 | grep '^[[:space:]]* id: [[:digit:]]*' | awk '{print $2}')
zpool import -f -R /mnt/bhyve-${vm} -t $zid bhyve-${vm}

Added experiments/bhyve/bin/new-vm.sh.

















































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
#!/bin/sh
# Create a new 24G UEFI-bootable FreeBSD VM image at vms/$1.img: an EFI
# system partition plus a ZFS root populated from dist/ tarballs.
# Leaves the pool imported at /mnt/bhyve-<vm>; run bin/config.sh and
# bin/unmount.sh afterwards.
set -e

vm=$1
img=vms/${vm}.img
root=/mnt/bhyve-${vm}
boot=${root}-boot
vers=${VERS:-13.2-RELEASE}
base=dist/${vers}-base.txz
kernel=dist/${vers}-kernel.txz

if [ ! -f $base -o ! -f $kernel ]; then
    echo "error: make sure $base and $kernel are present"
    echo "suggestion: make dist"
    exit 1
fi

mkdir -p vms
truncate -s 24G $img

# capture the md unit mdconfig actually allocates
md=$(mdconfig -a -t vnode -f $img)
disk=/dev/${md}

# boot partition
gpart create -s gpt $disk
gpart add -a 4k -s 40M -t efi $disk
newfs_msdos -F 32 -c 1 ${disk}p1

# root partition (temporary pool name via -t avoids clashing with the
# host's own "zroot")
gpart add -a 4k -t freebsd-zfs $disk
zpool create -m / -o altroot=${root} -o autoexpand=on -O atime=off -t bhyve-${vm} zroot ${disk}p2
tar -C $root -xzf $base
tar -C $root -xzf $kernel
touch ${root}/etc/fstab

# copy boot loader
mount -t msdosfs -o longnames ${disk}p1 $boot
mkdir -p ${boot}/EFI/BOOT
cp ${root}/boot/loader.efi ${boot}/EFI/BOOT/BOOTX64.efi
umount $boot

Added experiments/bhyve/bin/tap.sh.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
#!/bin/sh
# Idempotently create a tap interface for a VM and attach it to a bridge.
# Usage: tap.sh <vm-name> <bridge-name>
set -e

iface=tap-${1}
bridge=$2

# Create the tap device only if it does not already exist.
if ! ifconfig $iface > /dev/null; then
    ifconfig tap create name $iface
fi

# Add to the bridge only if not already a member (re-adding would fail).
(ifconfig $bridge | grep "member: $iface" > /dev/null) || ifconfig $bridge addm $iface

Added experiments/bhyve/bin/unmount.sh.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
#!/bin/sh
# Tear down a VM image mounted by mount.sh / new-vm.sh.
# Usage: unmount.sh <vm-name>
# Exports the pool, detaches the backing memory disk, and removes the
# mount point. set -e so a failed export does not detach the md device
# out from under a still-imported pool (the sibling scripts use it too).
set -e

vm=$1
zpool export bhyve-${vm}
# mdconfig -l prints e.g. "md0"; strip the "md" prefix to get the unit.
md=$(mdconfig -l -f vms/${vm}.img | sed -e 's/^md//')
mdconfig -d -u "$md"
rmdir /mnt/bhyve-${vm}

Added experiments/bhyve/example-rc.conf.







>
>
>
1
2
3
hostname="example"
ifconfig_vtnet0="192.168.6.2/24"
defaultrouter="192.168.6.1"

Added experiments/bhyve/example.mk.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
# Example bhyve VM driven by the scripts in bin/.
# `destroy` added to .PHONY (it was missing and would be skipped if a
# file named "destroy" ever existed).
.PHONY: vm bridge iface boot destroy

vm: vms/example.img

bridge:
	./bin/bridge.sh bh2 192.168.5.1/24

iface: bridge
	./bin/tap.sh example bh2

# Build the image on first run; afterwards just re-mount it so the
# config step can re-apply example-rc.conf.
vms/example.img: example-rc.conf
	if [ -f ${.TARGET} ]; then ./bin/mount.sh example; else ./bin/new-vm.sh example; fi
	./bin/config.sh example
	./bin/unmount.sh example

boot: vms/example.img iface
	./bin/boot.sh example

# /dev/vmm/example is a device node, not a regular file, so test with
# -e (the original -f never matched and the destroy was never run).
destroy:
	if [ -e /dev/vmm/example ]; then bhyvectl --destroy --vm=example; fi

Added experiments/duckdb_cross_db_join.sh.











































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
#!/bin/sh
# Demonstrate a cross-database join in DuckDB: attach two database
# files in one session and join a table from each in a single query.
set -e

work=$(mktemp -d -t duckdb-cross-db)
cd $work

# The heredoc is fed straight to the duckdb CLI; it contains no
# shell-expandable text, so the unquoted delimiter is harmless.
cat <<EOF | duckdb
attach 'db1.db';
create table db1.ids (id integer primary key, name string not null);
insert into db1.ids values(1, 'pat');
insert into db1.ids values(2, 'edgar');

attach 'db2.db';
create table db2.traits (name string primary key, species string not null);
insert into db2.traits values('pat', 'human');
insert into db2.traits values('edgar', 'dog');

select ids.*, traits.species from db1.ids ids join db2.traits traits on ids.name=traits.name;
EOF

echo "DONE: $work"

Added experiments/dynmk/.gitignore.





>
>
1
2
out
out2

Added experiments/dynmk/Makefile.





























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
# Experiment: can bmake pick up targets produced by an earlier target
# in the same run?  See README.md — as written this requires running
# `make` twice: NEWFILES is evaluated at parse time, so out/ is empty
# (or absent) on the first pass and the out2/ targets do not exist yet.
.MAIN: all

# :C modifiers that strip the src/ and out/ path prefixes.
SRCSUB = C/^src\///
OUTSUB = C/^out\///

clean:
	rm -rf out out2

INFILES != find src -type f -not -name '*~'

# Stage 1: copy each src file to out/, hooking it onto `all`.
.for f in ${INFILES}
out/${f:${SRCSUB}}: out ${f}
	cp ${f} ${.TARGET}
all: out/${f:${SRCSUB}}
.endfor

out:
	mkdir out
out2:
	mkdir out2

# Stage 2: only visible when out/ already exists at parse time — this
# is the limitation the experiment demonstrates.
.if exists(out)
NEWFILES != find out -type f
.for f in ${NEWFILES}
out2/${f:${OUTSUB}}: out2 ${f}
	cp ${f} ${.TARGET}

all: out2/${f:${OUTSUB}}
.endfor
.endif

Added experiments/dynmk/README.md.





















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
# Dynamic Make

What I want: run `make` one time to produce files under `out/` and `out2/`.

What happens: I have to run `make` twice.
The first run produces `out/*`, and the second produces `out2/*`.

Why? As I understand it, make builds its targets statically in one pass.
The first time this runs, there is nothing in `out/`, so `NEWFILES != find out -type f` is empty.

Is there a way to add targets after another target runs, or as part of a run?
I don't want to evaluate the `out2/*` targets until after `out/*` has completed.

I have tried "recursive make" where I define a target like:

```make
all:
	make out
	make out2
```

The problem I've found with that is that stderr doesn't seem to be redirected properly.
Any stderr from `make out2` is printed on stdout on the top-level make.

I would like to do this with a single non-recursive make target if possible.
It may not be possible though.

Added experiments/dynmk/src/bar.

Added experiments/dynmk/src/foo.

Added experiments/fossil-child/fossil-child.sh.





































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
#!/bin/sh
# Experiment: maintain a "child" fossil repository derived from a
# parent project, pull parent changes with --from-parent-project,
# then move a bad commit series onto its own branch.
set -e
work=$(mktemp -d -t fossil-child)

cd $work
mkdir fossils parent child

fossil new fossils/parent.fossil
cd parent
fossil open ../fossils/parent.fossil
# Name the parent project by writing its config table directly.
cat <<EOF | sqlite3 ../fossils/parent.fossil
INSERT INTO config(name,value)
   VALUES('project-name','fossil parent repo');
EOF

echo '== adding parent-foo'
echo 'parent-foo 1' > parent-foo
fossil add parent-foo
fossil commit -m 'add parent-foo'

echo '== cloning to child'
cd ..
fossil clone fossils/parent.fossil fossils/child.fossil

cd child
fossil open ../fossils/child.fossil
fossil remote off
# Give the child its own project code/name, preserving the parent's
# as parent-project-* — this is what makes `fossil pull
# --from-parent-project` work later.
cat <<EOF | sqlite3 ../fossils/child.fossil
UPDATE config SET name='parent-project-code' WHERE name='project-code';
UPDATE config SET name='parent-project-name' WHERE name='project-name';
INSERT INTO config(name,value)
   VALUES('project-code',lower(hex(randomblob(20))));
INSERT INTO config(name,value)
   VALUES('project-name','fossil child repo');
EOF

echo '== adding child bar'
echo 'child-bar 1' > child-bar
fossil add child-bar
fossil commit -m 'BAD add child-bar'
echo 'child-bar 2' >> child-bar
fossil commit -m 'update child-bar'

echo '== updating parent-foo'
cd ../parent
echo 'parent-foo 2' >> parent-foo
fossil commit -m 'update parent-foo'

echo '== pulling parent into child'
cd ../child
fossil pull --from-parent-project ../fossils/parent.fossil
fossil up trunk
fossil status
fossil timeline -b trunk
fossil ls

echo '== closing branch'
# Find the checkin whose comment contains BAD and amend it onto a
# "mistake" branch so trunk's timeline no longer includes it.
bad=$(fossil timeline -F '%h %c' | grep BAD | awk '{print $1}')
fossil amend $bad --branch mistake -m 'shunt bad series of commits'

fossil up trunk
fossil status
fossil timeline -b trunk
fossil ls

echo $work

Added experiments/fossil_fork.sh.



















































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
#!/bin/sh
set -e

# Fossil pulls before committing.
# What happens when it does that?
# Fossil rejects the final commit that would result in a fork.
# User needs to `fossil update` to get the changes.

# Print a section banner so the transcript is easy to scan.
_header()
{
    dash="===="
    echo "${dash} ${1} ${dash}"
}

_header 'BEGIN'
server=$(mktemp -d)
server_db=${server}/fossil.db
fork_a=$(mktemp -d)
fork_b=$(mktemp -d)

_header 'INIT server'
cd ${server}
fossil init ${server_db}
fossil open ${server_db} --workdir ${server}

_header 'COMMIT #1 to fork a'
cd ${fork_a}
fossil clone --workdir ${fork_a} ${server_db}
echo fork_a > fork_a
fossil add fork_a
# Fixed copy/paste defect: this commit was labelled 'fork_b'.
fossil commit -m 'fork_a'

_header 'COMMIT #1 to fork b'
cd ${fork_b}
fossil clone --workdir ${fork_b} ${server_db}
echo fork_b > fork_b
fossil add fork_b
fossil commit -m 'fork_b'

_header 'COMMIT #2 to fork a - results in fork'
cd ${fork_a}
echo 'fork a 2' >> fork_a
# This commit is expected to fail (fossil refuses to fork), so drop
# -e temporarily to let the script continue past it.
set +e
fossil commit -m 'another on fork a'

_header 'UPDATE fork a and commit'
set -e
fossil update
fossil commit -m 'another on fork a'

_header 'DONE'

cat <<EOF
- server: ${server}
- fork_a: ${fork_a}
- fork_b: ${fork_b}
EOF

Added experiments/fossil_publish.sh.





































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
#!/bin/sh
# Experiment: draft work on a "drafts" branch, then publish a single
# finished file to trunk and merge trunk back into drafts.
set -e

work=$(mktemp -d)
cd $work

fossil new publish.fossil
fossil open publish.fossil

# All drafting happens on the drafts branch from here on.
echo "foo 1" >> foo
fossil add foo
fossil commit -m "foo 1" --branch drafts

echo "bar 1" >> bar
fossil add bar
fossil commit -m "bar 1"

echo "foo 2" >> foo
fossil commit -m "foo 2"

echo "bar 2" >> bar
fossil commit -m "bar 2"

# Stash the finished copy of foo outside version control...
cp foo foo.publish

# ...switch to trunk, drop it in, and commit only that one file.
fossil update trunk
mv foo.publish foo
fossil add foo
fossil commit -m "publish foo"

# Merge trunk back so the drafts branch sees the published state.
fossil update drafts
fossil merge trunk

echo $work

Added experiments/fossil_squash/branch_and_merge.sh.





































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
#!/bin/sh
# Land a series of commits as a single merge checkin on trunk.
# Following Andy Bradford's suggestion from https://fossil-scm.org/forum/forumpost/a807940359
# set -e added for consistency with the other experiment scripts —
# without it a failed mktemp/open would run fossil in the caller's cwd.
set -e

work=$(mktemp -d)
cd $work

fossil init test.fossil
fossil open test.fossil

echo "foo 1" >> foo
fossil add foo
fossil commit -m "foo"

# A series of commits on a working branch...
echo "bar 1" >> bar
fossil add bar
fossil commit -m "bar 1" --branch do-bar

echo "bar 2" >> bar
fossil commit -m "bar 2"

echo "bar 3" >> bar
fossil commit -m "bar 3"

fossil update trunk

echo "foo 2" >> foo
fossil commit -m "foo 2"

# ...brought to trunk as one merge commit.
fossil merge do-bar
fossil commit -m "merge do-bar"
fossil timeline -t ci

echo "done: ${work}"

Added experiments/fossil_squash/fossil_squash.sh.













































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
#!/bin/sh
# Experiment: emulate a squash by making an empty commit whose message
# references earlier checkins, then tagging those checkins "hidden"
# (the CLI timeline still shows them; the web UI hides them).
workdir=$(mktemp -d)
echo "workdir: ${workdir}"
fossil new ${workdir}/squash.fossil > /dev/null
fossil open ${workdir}/squash.fossil --workdir ${workdir} > /dev/null

cd ${workdir}

# Create a file named $1 with itself as content, commit it, and print
# the first 6 characters of the new checkin's hash.
add_commit()
{
    echo $1 > $1
    fossil add $1 > /dev/null
    echo $(fossil commit -m "add $1" | grep New_Version: | awk '{print $2}') | cut -c 1-6
}

foo=$(add_commit foo)
bar=$(add_commit bar)
baz=$(add_commit baz)

echo "BEFORE squash"
fossil timeline -t ci

# The squash commit's message links back to the squashed checkins.
cat > message <<EOF
Nice and clean

squashes:
[$baz]
[$bar]
[$foo]
EOF

fossil commit --allow-empty -M message > /dev/null
# NOTE(review): the redirect below is commented out — looks like
# leftover debugging; confirm whether the tag output should be shown.
for commit in $foo $bar $baz; do
    fossil tag add --raw hidden $commit #> /dev/null
done

echo "AFTER squash - timeline still shows hidden commits, ui hides them"
fossil timeline -t ci

Added experiments/man-compare/Makefile.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
# Render foo.1.md with each candidate man-page tool so the outputs can
# be compared side by side.  The PDF target is commented out of `all`.
all: _build/foo-pandoc.1 _build/foo-md2man.1 _build/foo-pandoc.1.html # _build/foo-pandoc.1.pdf

_build/foo-pandoc.1: foo.1.md
	@mkdir -p ${.TARGET:H}
	pandoc --standalone --to man ${.ALLSRC} -o ${.TARGET}

_build/foo-md2man.1: foo.1.md
	@mkdir -p ${.TARGET:H}
	go-md2man -in ${.ALLSRC} -out ${.TARGET}

_build/foo-pandoc.1.html: foo.1.md
	@mkdir -p ${.TARGET:H}
	pandoc --standalone --to html ${.ALLSRC} -o ${.TARGET}

_build/foo-pandoc.1.pdf: foo.1.md
	@mkdir -p ${.TARGET:H}
	pandoc --standalone --to pdf ${.ALLSRC} -o ${.TARGET}

Added experiments/man-compare/README.md.

















































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
# Comparing man page authoring tools

Man pages are super useful, but I find the writing format really ugly.
There has to be a better way... right?
This is my exploration into different tools for authoring man pages.

Possible tools:

- straight up mdoc / mandoc
- [org-mode](https://orgmode.org/worg/org-tutorials/org-e-man-documentation.html) (exports to groff)
- docbook
  - [docbook2mdoc](https://mandoc.bsd.lv/docbook2mdoc/)
  - [laserhammer](https://github.com/trasz/laserhammer)
- POD
  - [pod2mdoc](https://mandoc.bsd.lv/pod2mdoc/)
- asciidoc
  - [asciidoctor](https://docs.asciidoctor.org/asciidoctor/latest/manpage-backend/)
- markdown to man
  - [markdown2man](https://manpages.ubuntu.com/manpages/lunar/man1/markdown2man.1.html)
  - pandoc - lots of blog posts about this, seems maybe good
  - [ronn](https://github.com/rtomayko/ronn)
  - [md2man](https://github.com/sunaku/md2man)
  - [go-md2man](https://github.com/cpuguy83/go-md2man)
  - [um](https://github.com/sinclairtarget/um) - tool to maintain your own copies of man pages
  
in ports:

- docbook2mdoc
- emacs
- pod2mdoc
- asciidoc
- pandoc
- go-md2man
- laserhammer (no maintainer)

tried, rejected:

- ronn

not in ports:

- asciidoctor
- markdown2man
- md2man
- um

Keyword I'm looking for is "mandoc converter" or "mdoc converter"

Questions:

- how can I make emacs read a specific file as a man page?

Notes:

- freebsd uses `mdoc(7)` format, rendered by `mandoc(1)`
- `mandoc(1)` outputs to ASCII, HTML, PDF
- mdoc is a pretty big language
- [blog post about taking notes in man format](https://gpanders.com/blog/write-your-own-man-pages/)
  - actually about markdown, but rendering to man
  
pandoc:

- https://gabmus.org/posts/man_pages_with_markdown_and_pandoc/
- https://gpanders.com/blog/write-your-own-man-pages/
- https://www.dlab.ninja/2021/10/how-to-write-manpages-with-markdown-and.html?m=1
- https://eddieantonio.ca/blog/2015/12/18/authoring-manpages-in-markdown-with-pandoc/
- http://jeromebelleman.gitlab.io/posts/publishing/manpages/

Related:

I want good general documentation tools as well, for writing reference manuals / guides.
FreeBSD has an excellent documentation toolchain, as does GNU.

Added experiments/man-compare/foo.1.md.















































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
% foo(1) | User Commands
% Pat Maddox

# NAME

foo

# SYNOPSIS

`foo [-bar -baz]`  
`foo [--qux arg1 ...]`

# DESCRIPTION

This is foo.
It is the first command.
You will probably run it.

# ENVIRONMENT

`BAR`
: same as `-bar`

# EXIT STATUS

1
: something went wrong

2
: something went really wrong

3
: run for cover

# EXAMPLES

foo with bar:
: `foo -bar`

foo with qux:
: `foo --qux file1 file2`

foo with many qux:
:
```
foo --qux file1
  file2
  file3
```

# COMPATIBILITY

FreeBSD >= 13.2

# SEE ALSO

bar(1)

# STANDARDS

foo.conf(5)

# HISTORY

Widely considered to be the first program ever written.
Yes, it precedes "hello world".

# BUGS

No way.
I code good.

Added experiments/vale/README.md.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# vale experiments

Notes:

- This requires the `nm_test` jail to have a devfs ruleset that shows `netmap` device.
- Set up a bridge with one of the `bridge_*.sh` scripts, and then run tests.
- It's possible that test results differ across machines.

## bridges

- `bridge_epair.sh` - crashes. creates epair devices on each host, bridges (?) them using `valectl`.
  I think `valectl` bridges them, but I'm not sure.

## tests

- `ping 192.168.6.2`
- `jexec nm_test ping 192.168.6.1`

Added experiments/vale/bridge_epair.sh.





























>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
#!/bin/sh
# Connect the host and the nm_test jail through a VALE switch using
# epair interfaces. NOTE(review): per ../README.md this currently
# crashes; kept as a reproduction.
set -e

# Host side: epair with an address; hand the b-end to vale1.
ifconfig epair10 create inet 192.168.6.1/24 up
ifconfig epair10b up
valectl -h vale1:epair10b

service jail onestart nm_test
j="jexec nm_test"
# Jail side: mirror setup on epair11, also attached to vale1.
$j ifconfig epair11 create inet 192.168.6.2/24 up
$j ifconfig epair11b up
$j valectl -h vale1:epair11b

# Show the resulting VALE configuration.
valectl

Added freebsd-prs/274824-arp-cannot-delete-entry/README.md.







>
>
>
1
2
3
# if\_bridge / if\_epair: unable to delete an arp entry after deleting associated route

https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=274824

Added freebsd-prs/274824-arp-cannot-delete-entry/bug.sh.











































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
#!/bin/sh
# Reproduction for FreeBSD PR 274824: after deleting the route backing
# an ARP entry, the entry can no longer be deleted with arp -d.
# Usage: bug.sh [epair|bridge|<existing-iface>]   (default: bridge)
# Behavior (including the demonstrated failure) is intentional.
set -x

iface=${1:-bridge}
ip=192.168.99.1
ether=01:23:45:67:89:a0

# For epair/bridge, create a scratch interface to test against.
if [ "$iface" = "epair" -o "$iface" = "bridge" ]; then
    iface=${iface}10
    ifconfig $iface create 192.168.42.1/32 up # FreeBSD 14 needs an IP to add routes
    if [ "$iface" = "epair10" ]; then
	iface="epair10a"
    fi
fi

echo ==== THIS WILL SUCCEED:
route add $ip -iface $iface
sleep 1
arp -s $ip $ether temp
arp -d $ip

echo ==== THIS WILL FAIL: "arp: writing to routing socket: No such file or directory"
arp -s $ip $ether temp
route delete $ip
sleep 1
arp -d $ip

echo ==== THIS WILL SUCCEED
route add $ip -iface $iface
sleep 1
arp -d $ip

route delete $ip

# Clean up the scratch interface if we created one above.
if [ "$iface" = "epair10a" -o "$iface" = "bridge10" ]; then
    ifconfig $iface destroy
fi

Changes to infra/Mk/base.mk.

10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28













































































































































































































































































29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50

51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74

BUILDDIR=	${.CURDIR}/_build
BOOTENV=	${BUILDDIR}/${SNAPSHOT}.be
IMGSIZE?=	20GB
ZPOOL=		${NAME}-pkgbase
ROOT=		/${ZPOOL}
CONFIG=		${ROOT}/config
PKG_CACHEDIR=	/var/cache/pkg
# pkg operating on the image root (-r) vs. chrooted into it (-c).
PKG=		PKG_CACHEDIR=${PKG_CACHEDIR} ABI=${ABI} IGNORE_OSVERSION=yes pkg -r ${ROOT}
CHROOTPKG=	ABI=${ABI} IGNORE_OSVERSION=yes pkg -c ${ROOT}

# NOTE(review): this tests the effective *group* id, not the user id —
# any user with primary gid 0 (wheel) passes. `id -u` may be intended.
GUARD_ROOT=	@if [ $$(id -g) -ne 0 ]; then echo "E: must run as root" 1>&2; exit 1; fi

PACKAGES!=	cat packages
# Installed before PACKAGES in the ${BOOTENV} target below.
INITPACKAGES=	ca_root_nss \
		pkg

GROUPS+=	wheel














































































































































































































































































# Build the boot-environment image: create a file-backed zpool,
# install base + packages into it, copy config files, and record the
# users/groups/keepfiles metadata consumed by syncbe.sh.
${BOOTENV}:
	${GUARD_ROOT}
	@mkdir -p ${BUILDDIR}
#	Start clean: unmount a stale pkg-cache nullfs mount and destroy
#	any previous pool before recreating the backing file.
#	NOTE(review): the grep uses ${ROOT}${PKG_CACHEDIR} but the umount
#	uses ${ROOT}/${PKG_CACHEDIR} (double slash) — works, inconsistent.
	if mount | grep -q '${ROOT}${PKG_CACHEDIR}'; then umount ${ROOT}/${PKG_CACHEDIR}; fi
	if zpool status ${ZPOOL} > /dev/null 2>&1; then zpool destroy ${ZPOOL}; fi
	rm -f ${BUILDDIR}/${ZPOOL}.zpool
	truncate -s ${IMGSIZE} ${BUILDDIR}/${ZPOOL}.zpool
	zpool create ${ZPOOL} ${BUILDDIR}/${ZPOOL}.zpool

#	Share the host's pkg cache with the image to avoid re-downloads.
	mkdir -p ${ROOT}${PKG_CACHEDIR}
	mount_nullfs ${PKG_CACHEDIR} ${ROOT}${PKG_CACHEDIR}

#	Install every FreeBSD- base package except dbg/lib32/src variants.
	${PKG} update -r ${BASEREPO}
	@echo "installing base packages..."
	@${PKG} install -q -y -r ${BASEREPO} $$(${PKG} search -r ${BASEREPO} -q FreeBSD- 2>/dev/null | grep -v '\-dbg\-[[:digit:]]*.' | grep -v '\-lib32\-[[:digit:]]*.' | grep -v '\-src\-[[:digit:]]*.')

	@mkdir -p ${ROOT}/usr/share/keys/pkg/trusted
	cp /usr/share/keys/pkg/trusted/* ${ROOT}/usr/share/keys/pkg/trusted/
	${PKG} update -r ${PKGREPO}

	@echo "installing initial packages..."
	${PKG} install -q -y -r ${PKGREPO} ${INITPACKAGES}


	@mkdir -p ${CONFIG}
	echo ${USERS:O:u} > ${CONFIG}/users
	echo ${GROUPS:O:u} > ${CONFIG}/groups

#	Copy static config files into the image, preserving paths.
	for f in $$(find files -type f -not -name '*~'); do \
		newfile=$$(echo $${f} | sed 's|^files|${ROOT}|') && \
		mkdir -p $$(dirname $${newfile}) && \
		${INSTALL} -o 0 -g 0 -m 0644 $${f} $${newfile} \
	; done
	${INSTALL} -o 0 -g 0 -m 0744 ${.CURDIR}/../share/syncbe.sh ${ROOT}/

#	chroot install everything else because package install scripts that may not respect -r
	@echo "installing packages..."
	${CHROOTPKG} update -r ${PKGREPO}
	${CHROOTPKG} install -q -y -r ${PKGREPO} ${PACKAGES:O:u}
	certctl -D ${ROOT} rehash

#	Merge KEEPFILES into the recorded keepfiles list, de-duplicated.
	echo ${KEEPFILES} > ${CONFIG}/keepfiles.tmp
	cat ${CONFIG}/keepfiles >> ${CONFIG}/keepfiles.tmp
	sort -u ${CONFIG}/keepfiles.tmp > ${CONFIG}/keepfiles
	rm ${CONFIG}/keepfiles.tmp

	touch ${ROOT}/etc/fstab







<
|
<




|




>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>



<





<
<
<


|




<
|
|
>





|
<
<
<
<
<
|
<
<
<
<
<







10
11
12
13
14
15
16

17

18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298

299
300
301
302
303



304
305
306
307
308
309
310

311
312
313
314
315
316
317
318
319





320





321
322
323
324
325
326
327

BUILDDIR=	${.CURDIR}/_build
BOOTENV=	${BUILDDIR}/${SNAPSHOT}.be
IMGSIZE?=	20GB
ZPOOL=		${NAME}-pkgbase
ROOT=		/${ZPOOL}
CONFIG=		${ROOT}/config

PKG=		PKG_CACHEDIR=/var/cache/pkg ABI=${ABI} IGNORE_OSVERSION=yes pkg -r ${ROOT}


GUARD_ROOT=	@if [ $$(id -g) -ne 0 ]; then echo "E: must run as root" 1>&2; exit 1; fi

PACKAGES!=	cat packages
PACKAGES+=	ca_root_nss \
		pkg

GROUPS+=	wheel

BASE_PACKAGES+= acct \
		acct-man \
		acpi \
		acpi-man \
		apm \
		apm-man \
		at \
		at-man \
		autofs \
		autofs-man \
		bhyve \
		bhyve-man \
		blocklist \
		blocklist-dev \
		blocklist-man \
		bluetooth \
		bluetooth-dev \
		bluetooth-man \
		bootloader \
		bootloader-dev \
		bsdinstall \
		bsdinstall-man \
		bsnmp \
		bsnmp-dev \
		bsnmp-man \
		caroot \
		ccdconfig \
		ccdconfig-man \
		certctl \
		certctl-man \
		clang \
		clang-dev \
		clang-man \
		clibs \
		clibs-dev \
		clibs-man \
		console-tools \
		console-tools-man \
		ctf-tools \
		ctf-tools-man \
		cxgbe-tools \
		cxgbe-tools-man \
		devd \
		devd-man \
		devmatch \
		devmatch-dev \
		devmatch-man \
		dhclient \
		dhclient-man \
		dma \
		dma-man \
		dtrace \
		dtrace-dev \
		dtrace-man \
		dwatch \
		dwatch-man \
		ee \
		ee-man \
		efi-tools \
		efi-tools-dev \
		efi-tools-man \
		elftoolchain \
		elftoolchain-man \
		fetch \
		fetch-dev \
		fetch-man \
		ftp \
		ftp-man \
		ftpd \
		ftpd-man \
		fwget \
		fwget-man \
		geom \
		geom-man \
		ggate \
		ggate-man \
		hast \
		hast-man \
		hostapd \
		hostapd-man \
		hyperv-tools \
		inetd \
		inetd-man \
		ipf \
		ipf-man \
		ipfw \
		ipfw-man \
		iscsi \
		iscsi-man \
		jail \
		jail-man \
		kerberos \
		kerberos-lib \
		kerberos-lib-dev \
		kerberos-lib-man \
		kerberos-man \
		kernel-generic \
		lib9p \
		lib9p-dev \
		libarchive \
		libarchive-dev \
		libarchive-man \
		libbegemot \
		libbegemot-dev \
		libbegemot-man \
		libblocksruntime \
		libblocksruntime-dev \
		libbsdstat \
		libbsdstat-dev \
		libbsm \
		libbsm-dev \
		libbsm-man \
		libbz2 \
		libbz2-dev \
		libcasper \
		libcasper-dev \
		libcasper-man \
		libcompat-dev \
		libcompat-man \
		libcompiler_rt-dev \
		libcuse \
		libcuse-dev \
		libcuse-man \
		libdwarf \
		libdwarf-dev \
		libdwarf-man \
		libevent1 \
		libevent1-dev \
		libexecinfo \
		libexecinfo-dev \
		libexecinfo-man \
		libipt \
		libipt-dev \
		libldns \
		libldns-dev \
		liblzma \
		liblzma-dev \
		libmagic \
		libmagic-dev \
		libmagic-man \
		libpathconv \
		libpathconv-dev \
		libpathconv-man \
		librpcsec_gss \
		librpcsec_gss-dev \
		librpcsec_gss-man \
		librss \
		librss-dev \
		libsdp \
		libsdp-dev \
		libsdp-man \
		libsqlite3 \
		libsqlite3-dev \
		libstdbuf \
		libstdbuf-dev \
		libstdbuf-man \
		libstdthreads \
		libstdthreads-dev \
		libstdthreads-man \
		libthread_db \
		libthread_db-dev \
		libucl \
		libucl-dev \
		libucl-man \
		libvgl \
		libvgl-dev \
		libvgl-man \
		libvmmapi \
		libvmmapi-dev \
		liby-dev \
		lld \
		lld-man \
		lldb \
		lldb-man \
		locales \
		mlx-tools \
		mlx-tools-man \
		mtree \
		mtree-man \
		natd \
		natd-dev \
		natd-man \
		netmap \
		netmap-dev \
		netmap-man \
		newsyslog \
		newsyslog-man \
		nfs \
		nfs-man \
		nuageinit \
		nvme-tools \
		nvme-tools-man \
		openssl \
		openssl-lib \
		openssl-lib-dev \
		openssl-lib-man \
		openssl-man \
		periodic \
		periodic-man \
		pf \
		pf-dev \
		pf-man \
		pkg-bootstrap \
		pkg-bootstrap-man \
		ppp \
		ppp-man \
		quotacheck \
		quotacheck-man \
		rc \
		rc-man \
		rcmds \
		rcmds-man \
		rdma \
		rdma-man \
		rescue \
		resolvconf \
		resolvconf-man \
		runtime \
		runtime-dev \
		runtime-man \
		sendmail \
		sendmail-dev \
		sendmail-man \
		smbutils \
		smbutils-dev \
		smbutils-man \
		src \
		src-sys \
		ssh \
		ssh-dev \
		ssh-man \
		syscons \
		syslogd \
		syslogd-man \
		tcpd \
		tcpd-dev \
		tcpd-man \
		telnet \
		telnet-man \
		tests \
		tests-dev \
		tests-man \
		ufs \
		ufs-dev \
		ufs-man \
		unbound \
		unbound-dev \
		unbound-man \
		utilities \
		utilities-dev \
		utilities-man \
		vi \
		vi-man \
		vt-data \
		yp \
		yp-man \
		zfs \
		zfs-dev \
		zfs-man \
		zoneinfo
#	csh \
#	csh-man \
#	games \
#	games-man \
#	wpa \
#	wpa-man \

BASE_PACKAGES_LIST=	${BASE_PACKAGES:O:u:S/^/FreeBSD-/:S/$/-${SNAPSHOT:S/-/./}/}

${BOOTENV}:
	${GUARD_ROOT}
	@mkdir -p ${BUILDDIR}

	if zpool status ${ZPOOL} > /dev/null 2>&1; then zpool destroy ${ZPOOL}; fi
	rm -f ${BUILDDIR}/${ZPOOL}.zpool
	truncate -s ${IMGSIZE} ${BUILDDIR}/${ZPOOL}.zpool
	zpool create ${ZPOOL} ${BUILDDIR}/${ZPOOL}.zpool




	${PKG} update -r ${BASEREPO}
	@echo "installing base packages..."
	${PKG} install -q -y -r ${BASEREPO} ${BASE_PACKAGES_LIST}

	@mkdir -p ${ROOT}/usr/share/keys/pkg/trusted
	cp /usr/share/keys/pkg/trusted/* ${ROOT}/usr/share/keys/pkg/trusted/
	${PKG} update -r ${PKGREPO}

	@echo "installing packages..."
	${PKG} install -q -y -r ${PKGREPO} ${PACKAGES:O:u}
	certctl -D ${ROOT} rehash

	@mkdir -p ${CONFIG}
	echo ${USERS:O:u} > ${CONFIG}/users
	echo ${GROUPS:O:u} > ${CONFIG}/groups

	for f in $$(find files -type f -not -name '*~'); do ${INSTALL} -o 0 -g 0 -m 0644 $${f} $$(echo $${f} | sed 's|^files|${ROOT}|'); done





	chmod 744 ${ROOT}/syncbe.sh






	echo ${KEEPFILES} > ${CONFIG}/keepfiles.tmp
	cat ${CONFIG}/keepfiles >> ${CONFIG}/keepfiles.tmp
	sort -u ${CONFIG}/keepfiles.tmp > ${CONFIG}/keepfiles
	rm ${CONFIG}/keepfiles.tmp

	touch ${ROOT}/etc/fstab

Changes to infra/gulliver/Makefile.

1
2
3
4




5
NAME=		gulliver
USERS=		patmaddox
GROUPS=		video patmaddox





.include "../Mk/base.mk"


|

>
>
>
>

1
2
3
4
5
6
7
8
9
NAME=		gulliver
USERS=		patmaddox
GROUPS=		video

BASE_PACKAGES+=	wpa \
		wpa-man
# missing: denote.el ob-go

.include "../Mk/base.mk"

Deleted infra/gulliver/files/etc/resolv.conf.

1
2
nameserver 8.8.8.8
nameserver 8.8.4.4
<
<




Added infra/gulliver/files/syncbe.sh.





















































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
#!/bin/sh
# Sync users, groups, and kept config files from the running host into
# the boot environment rooted at this script's own directory.
# Run as root from inside the mounted BE: /<be-root>/syncbe.sh
set -e

# Copy the host root's password hash into the BE, then replace every
# user listed in config/users with the host's current passwd entry.
sync_users()
{
    pw usershow -n root | cut -f 2 -d ':' | pw -R ${root} usermod -n root -H 0

    for u in $(cat ${config}/users); do
	if pw -R ${root} usershow -n ${u} > /dev/null 2>&1; then
	    pw -R ${root} userdel -n ${u}
	fi
	pw usershow -n ${u} >> ${root}/etc/master.passwd
    done

    pwd_mkdb -p -d ${root}/etc ${root}/etc/master.passwd
}

# Mirror group membership for each group in config/groups. Create the
# group in the BE first if absent — groupmod on a missing group would
# fail and abort the whole script under set -e (this guard matches the
# infra/share version of this script).
sync_groups()
{
    for g in $(cat ${config}/groups); do
	if ! pw -R ${root} groupshow -n ${g} > /dev/null 2>&1; then
	    gid=$(pw groupshow -n ${g} | sed -E 's/^.*:.*:([[:digit:]]+):*/\1/')
	    pw -R ${root} groupadd -n ${g} -g ${gid}
	fi

	users=$(pw groupshow -n ${g} | sed 's/.*://')
	pw -R ${root} groupmod -n ${g} -m ${users}
    done
}

# Copy each host file in config/keepfiles into the BE, skipping files
# absent on this host (a bare cp would abort under set -e).
sync_keepfiles()
{
    for f in $(cat ${config}/keepfiles); do
	if [ -f ${f} ]; then cp ${f} ${root}${f}; fi
    done
}

main() {
    local root=$(dirname $(realpath $0))
    local config=${root}/config

    sync_users
    sync_groups
    sync_keepfiles
}

main

Deleted infra/gulliver/files/usr/local/etc/pkg/repos/freebsd.conf.

1
2
3
4
5
6
7
8
9
FreeBSD: { enabled: no }

freebsd: {
  url: "https://pkg.FreeBSD.org/${ABI}/latest",
  signature_type: "fingerprints",
  fingerprints: "/usr/share/keys/pkg",
  priority: 0,
  enabled: yes
}
<
<
<
<
<
<
<
<
<


















Deleted infra/patmaddox.com/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
.PHONY: all cert test clean

CERTFILES= ${HOME}/Downloads/www_patmaddox_com.pem ${HOME}/Downloads/www_patmaddox_com.key

all: cert test clean

cert: ${CERTFILES}
	cat ${.ALLSRC} | ssh root@patmaddox.com "cat > /root/www_patmaddox_com.haproxy.pem && service haproxy reload"

test:
	echo | openssl s_client -showcerts -servername patmaddox.com -connect patmaddox.com:443 2>/dev/null | openssl x509 -inform pem -noout -text

clean:
	rm -f ${CERTFILES}
<
<
<
<
<
<
<
<
<
<
<
<
<
<




























Deleted infra/patmaddox.com/haproxy.conf.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
global
	daemon
defaults
	mode http
	timeout connect 5000ms
	timeout client 50000ms
	timeout server 50000ms

frontend patmaddox.com
	bind 45.32.226.152:80
	bind 45.32.226.152:443 ssl crt /root/www_patmaddox_com.haproxy.pem
	http-request redirect scheme https unless { ssl_fc }
	redirect prefix https://patmaddox.com code 301 unless { hdr(host) -i patmaddox.com }
	option forwardfor
	default_backend fossil

backend fossil
	server server1 127.0.0.1:8080
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































Deleted infra/share/syncbe.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
#!/bin/sh
# Sync users, groups, and kept config files from the running host into
# the boot environment rooted at this script's own directory.
# Run as root from inside the mounted BE: /<be-root>/syncbe.sh
set -e

# Copy the host root's password hash into the BE, then replace every
# user listed in config/users with the host's current passwd entry.
sync_users()
{
    pw usershow -n root | cut -f 2 -d ':' | pw -R ${root} usermod -n root -H 0

    for u in $(cat ${config}/users); do
	if pw -R ${root} usershow -n ${u} > /dev/null 2>&1; then
	    pw -R ${root} userdel -n ${u}
	fi
	pw usershow -n ${u} >> ${root}/etc/master.passwd
    done

    pwd_mkdb -p -d ${root}/etc ${root}/etc/master.passwd
}

# Mirror group membership for each group in config/groups, creating
# the group in the BE (with the host's gid) if it does not exist yet.
sync_groups()
{
    for g in $(cat ${config}/groups); do
	if ! pw -R ${root} groupshow -n ${g} > /dev/null 2>&1; then
	    gid=$(pw groupshow -n ${g} | sed -E 's/^.*:.*:([[:digit:]]+):*/\1/')
	    pw -R ${root} groupadd -n ${g} -g ${gid}
	fi
	
	users=$(pw groupshow -n ${g} | sed 's/.*://')
	pw -R ${root} groupmod -n ${g} -m ${users}
    done
}

# Copy each host file in config/keepfiles into the BE, skipping files
# absent on this host (a bare cp would abort under set -e).
sync_keepfiles()
{
    for f in $(cat ${config}/keepfiles); do
	if [ -f ${f} ]; then cp ${f} ${root}${f}; fi
    done    
}

main() {
    local root=$(dirname $(realpath $0))
    local config=${root}/config
    
    sync_users
    sync_groups
    sync_keepfiles
}

main
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























































































Added jails/basic/Makefile.

























































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
# make the base dir
# extract base.txz
# start it
# stop it
# restart it
# clean up base
# copy config files
# destroy
NAME=		basic
ROOT=		/jails/${NAME}
DISTDIR=	${.CURDIR}/../dist
BASE.TXZ=	${DISTDIR}/13.2-RELEASE-base.txz
BASE_DOWNLOAD=	https://download.freebsd.org/releases/amd64/13.2-RELEASE/base.txz
#IP=		192.168.2.3/24
SCRIPTS=	${.CURDIR}/../scripts
IFACE?=		${NAME}
CONF=		${NAME}.conf

.MAIN: all
.PHONY: all login start stop restart

# Default: populate the jail root from the base distribution.
all: ${ROOT} ${ROOT}/COPYRIGHT

${ROOT}:
	mkdir ${ROOT}

# COPYRIGHT doubles as the "base extracted" marker file.
${ROOT}/COPYRIGHT: ${BASE.TXZ}
	tar -C ${ROOT} -xf ${BASE.TXZ}
	touch ${.TARGET}

${BASE.TXZ}:
	@mkdir -p ${DISTDIR}
	fetch -q -o ${BASE.TXZ} ${BASE_DOWNLOAD}

start:
	jail -f ${CONF} -c ${NAME}

stop:
	jail -f ${CONF} -r ${NAME}

restart: stop start

login:
	jexec ${NAME} login -f root

Added jails/basic/basic.conf.































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
# Jail definition for "basic": a vnet jail whose network plumbing is created
# by the jlif helper script (see exec.prepare below).
basic {
  # Variable consumed by the exec.prepare command line below.
  $ip=192.168.2.3/24;
  path=/jails/${name};
  vnet;
  host.hostname=${name};
  exec.clean;
  # Create the epair pair and attach it to the host bridge before start.
  exec.prepare="~patmaddox/jails/scripts/jlif ${name} ${ip}";
  exec.start="sh /etc/rc";
  exec.stop="sh /etc/rc.shutdown jail";
  # Tear down the jail-side interface once the jail is removed.
  exec.release="ifconfig ${name}a destroy";
  mount.devfs;
  allow.mount;
  allow.mount.devfs;
  allow.mount.tmpfs;
}

Added jails/scripts/jlif.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#!/bin/sh
# jlif - create an epair for a vnet jail and rename both ends.
#
# Usage: jlif <name> <ip>
#   name - jail name; interface ends are renamed <name>a / <name>b
#   ip   - jail address (accepted from the caller but not used here)
#
# Invoked from jail.conf exec.prepare; the host bridge "jails" must exist.
main()
{
    # Bug fix: the original did `name=$1; shift` then `ip=$2; shift`, which
    # read the *third* argument into ip (after the shift, the ip argument
    # had become $1). Capture both positionals directly instead.
    name=$1
    ip=$2

    iface=${name}

    # `ifconfig epair create` prints the name of the "a" end (e.g. epair0a);
    # derive the "b" end by swapping the trailing letter.
    aname=$(ifconfig epair create up)
    bname=$(echo "${aname}" | sed -e 's/a$/b/')

    ifconfig jails addm "${aname}"
    ifconfig "${aname}" name "${iface}a"
    ifconfig "${bname}" name "${iface}b"
}

main "${@}"

Added justlib/doctor.sh.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# doctor - run a named health check under a timeout, printing a suggested
# remedy and exiting 1 when the check fails.
#   $1 - human-readable check name
#   $2 - command to run (passed through eval, so it may carry arguments)
#   $3 - remedy text shown on failure
#   $4 - timeout duration (default: 1s)
doctor() {
    name=$1
    cmd=$2
    remedy=$3
    timeout=${4:-1s}

    # printf instead of `echo -n`: -n is not portable across sh implementations.
    printf 'checking %s... ' "$name"
    fullcmd="timeout $timeout $cmd"

    # eval is required so $cmd may contain its own arguments; quote the
    # variable so the constructed string is handed to eval as one word.
    if eval "$fullcmd" > /dev/null; then
	echo "done"
    else
	echo "FAIL"
	echo "suggested remedy: $remedy"
	exit 1
    fi
}

Added org/pub.org.





























































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
* tickets
imported from patmaddox.com
** TODO Compare C interop of different languages                    :current:
Elixir, Rust, Go, D

Make a simple function that returns 1 (or possibly sums a couple numbers).

Call it from each of the languages.

Profile and dtrace.

Integrate with libucl(3) and libgit2 (or any other useful libs).

libucl(3) would be a great library to demo.

- Crystal
- OCaml lisp
- haxe
- Common Lisp, Haskell
- C++
** TODO freebsd-ports: build my own updated ports                   :current:
overlay my changes, or commit to the ports repo?

I think for new ports, it makes sense to use an overlay - since they are totally new files.

For updating existing ports, it makes sense to keep updates per branch, or maybe all on a single branch, and rebase / cherry-pick them onto main.

It's possible to clone a single directory.

    https://askubuntu.com/questions/460885/how-to-clone-only-some-directories-from-a-git-repository
    https://stackoverflow.com/questions/72223463/how-to-clone-specific-folder-from-git-using-sparse

Unblocks [fossil: build a package from source]
worktrees and sparse-checkout works well:

git worktree add --no-checkout ../editors-emacs
cd ../editors-emacs
git sparse-checkout set editors/emacs
git reset --hard HEAD
git status

and write about it

** TODO freebsd: learn how syscalls work                            :current:
I have a thread on the mailing list from a while back, where Warner explained how the libc function definition gets created. I’m not sure how it ultimately gets dispatched to an implementation.

** TODO fossil: make a better ticket dashboard                      :current:
My ticket dashboard is pretty good - now just need to back it up, document it, etc.

** TODO freebsd: modify man.freebsd.org script to support apropos sections :current:
man.cgi uses man -k . and then filters, instead of apropos. So it's not possible to pass the -s flag, because man doesn't support -k and -s together.

submitted patch: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=272906

** TODO fossil: render man pages
I can add man pages to my repo - can I have them render, either as www or in the source tree?
Maybe I can just use a Makefile to render the man page to markdown / HTML in the www directory.

** TODO freebsd: document a "well behaved port"
** TODO fossil: build a package from source
** TODO fossil: make a demo of how to link between code, doc pages, wiki, issues, and forums
I think it can do cool stuff by linking the artifact IDs.

Maybe this should be more of an FAQ or short guide? I could start by making a page with a bunch of questions in it, then gradually fill it out. Probably that page lives in its own branch, and as I answer questions I do that in trunk.

** TODO oss: replace Makefile with Justfile
oss/Makefile checks out some repos. make doesn't really seem like a good fit for that. It would be better to make a Justfile that runs a script to do the necessary checks.

** TODO freebsd: determine which config files have been changed since running the system
Can probably do this by extracting base.txz and doing an mtree on /etc and /boot - nothing else really changes there.

Everything in etc for the most part, unless there’s a sample file that is equivalent.

** TODO site: add code syntax highlighting
Should be easy to do with prism

** TODO freebsd: tool to find config files that have changed since install that need backing up
What should I back up? Fetches source / automates install to a temp dir, runs mtree, then finds the files that have changed. Add those to the list of files to be sent to Tarsnap (or wherever).
** TODO freebsd: homerc - an rc.conf framework for home directory files
** TODO ports: poudriere-pal
Tool to parse UCL and make sure that the poudriere configuration matches what is expected (aka declarative poudriere conf). Make it easy to have the same poudriere setup across machines.
Of course I could always take a closer look at how poudriere is actually configured… maybe I can edit config files directly.

** TODO fossil: render backlinks (e.g. related tickets)
similar to the [blocking tickets] feature, I think general backlinks would be useful. Fossil already does this for tickets, where you can see all of the check-ins related to a particular ticket.
** TODO writing: what I would do differently
- keep my library
- invest in open source editor and OS (utility vs asset)


** TODO Compare sqlite interop with different languages
** TODO pf: write a tool to test rules
Define a config file that tests connections between hosts. Have it launch a bunch of jails, then try to make connections among them.
** TODO fossil: remove duplicate entries from `fossil all ls`
FreeBSD symlinks (or something) /home to /usr/home and so now I have some duplicate repos showing up in fossil all ls - one using /home prefix and another using /usr/home prefix.

It might even be an opportunity to improve Fossil by doing fstat or realpath or something like that to determine if they're the same file, and not add the same path twice.

Found that I could sqlite3 ~/.fossil and see the entries that appear in fossil all ls. I just deleted the duplicates that I didn't want. We'll see if they come back.

** TODO fossil: make a demo of how to close and hide a branch
Wow that’s cool.

I’m just adding a comment.

There's no reason for it to clutter up the ticket list.

I'm not quite sure how to do this. I tried fossil branch --close and it marked the branch as closed - but I'm not even really sure what that means. I think it means it's not supposed to have any more children.

I think if you want to hide a branch, then you need to close the leaf, and hide the first child.

** TODO fossil: fzf switch branches
just branch will display the list of branches if it doesn't receive an argument. Use fzf to make a dropdown list of stuff instead.

** TODO fossil: test timeline after merge
commit to trunk, make a branch, a few commits to it. also commit to trunk. merge trunk into branch. What shows up in the timeline? All the commits, including the merged ones from master? Or only the ones that specifically have that branch tag?

** TODO fossil: script check out dirs for open and closed branches
I should have a checkout for all open branches. Realistically I’ll have them because I opened them. But, I work on multiple computers - so I may not have them open.

Write a little script that automatically makes check outs for each of my open branches, and deletes dirs for cleaned branches that have been closed.

** TODO fossil: make a demo of wiki and tech notes
** TODO doc: update home page to show WIP
Combination of leaf commits and tickets.
** TODO fossil: ask forum about private branches vs child projects
Better: do an experiment myself
** TODO p7x: make my home page a wiki page
That way I can edit it from my browser. Todo list and stuff should go there, for easy reference, marking things as done, etc.
** TODO fossil: look into why the timeline filter page only show check-ins when filtering on tag
You can tag technical notes and wiki pages, but when you filter on the timeline, only check-ins show up.
** TODO fossil: document my mv-301-redirect branch
** TODO emacs: port hcl-mode
** TODO freebsd: make a jail host platform
** TODO freebsd: use kyua and atf to test stuff
** TODO Compare configuration management tools (ansible, salt, etc)
** TODO emacs: disable shell command echoing
whenever I type a shell command, it echoes it back immediately.
not a fan.
* tasks
** TODO set up CI for my home dir
It should test things like emacs config, so I don't push something that won't work on a headless terminal.

Added oss/.gitkeep.

Changes to oss/Mk/gitrepo.mk.

1
2
3
4
5
6
7
8
9
10
11
12
13
MAIN?=		main
BRANCH?=	${MAIN}
BRANCHES?=
TAGS?=
SRCDIR=		b/${BRANCH}
GITDIR=		.git

.PHONY: branches clean do-clean clean-git build do-build fetch fetch-all git-push git-push-all git-remote-update git-tidy help help-builtin do-help test do-test merge-upstream

.if defined(UPSTREAM)
GIT_PROTOCOL?= ssh
.endif





|
|







1
2
3
4
5
6
7
8
9
10
11
12
13
MAIN?=		main
BRANCH?=	${MAIN}
BRANCHES?=
TAGS?=
SRCDIR=		${.CURDIR}/b/${BRANCH}
GITDIR=		${.CURDIR}/.git

.PHONY: branches clean do-clean clean-git build do-build fetch fetch-all git-push git-push-all git-remote-update git-tidy help help-builtin do-help test do-test merge-upstream

.if defined(UPSTREAM)
GIT_PROTOCOL?= ssh
.endif

Added oss/freebsd-ports.git/SUBMISSIONS.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
276452 databases/py-dbt-semantic-interfaces
276453 databases/py-dbt-core: databases/py-dbt-semantic-interfaces
276454 databases/py-dbt-duckdb
276455 databases/py-snowflake-connector-python
276456 databases/py-dbt-snowflake
276457 databases/py-schemachange
276458 databases/py-snowddl
276460 textproc/ox-gfm.el

Changes to oss/freebsd-src.git/Makefile.

42
43
44
45
46
47
48
49
50
51
52
53
54
	${PKG} install -y -r ${BASEREPO} ${BASE_PACKAGES:sh}
	certctl -D ${BASE} rehash

installer:
	@rm -f ${IMG}
	${SRCDIR}/release/amd64/make-memstick.sh ${BASE} ${IMG}

do-help:
	@echo 'build			# build source code'
	@echo 'packages		# install packages to ${REPODIR}'
	@echo 'installer		# build an installer image'

.include "../Mk/gitrepo.mk"







<
<
<
<
<

42
43
44
45
46
47
48





49
	${PKG} install -y -r ${BASEREPO} ${BASE_PACKAGES:sh}
	certctl -D ${BASE} rehash

installer:
	@rm -f ${IMG}
	${SRCDIR}/release/amd64/make-memstick.sh ${BASE} ${IMG}






.include "../Mk/gitrepo.mk"

Deleted oss/py-mimesis.git/Makefile.

1
2
3
4
5
USE_GH=		patmaddox/mimesis
UPSTREAM=	lk-geimfari/
MAIN=		master

.include "../Mk/gitrepo.mk"
<
<
<
<
<










Added oss/tailscale-dev/Justfile.







































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
# Working tree of the tailscale branch under test.
tsdir := "~/oss/tailscale.git/10024-freebsd-exit-node-client"
# Where the "archie" jail expects its tailscale binaries.
archiebin := "/jails/archie/usr/local/bin/"

# Build tailscale/tailscaled and hot-swap them into the archie jail.
build:
  #!/bin/sh
  set -e

  cd {{tsdir}}
  go121 build tailscale.com/cmd/tailscale
  go121 build tailscale.com/cmd/tailscaled

  # Stop the daemon (tolerating "not running"), replace both binaries,
  # then start it back up.
  doas jexec archie service tailscaled onestop || true
  doas cp tailscale {{archiebin}}
  doas cp tailscaled {{archiebin}}
  doas jexec archie service tailscaled onestart
  echo "started tailscale"

# Pass-through to `git stash` inside the tailscale working tree.
stash *args:
  cd {{tsdir}} && git stash {{args}}

Added pastebin/2023-04-20_fossil-make-test-results.txt.

































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
1664
1665
1666
1667
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
1709
1710
1711
1712
1713
1714
1715
1716
1717
1718
1719
1720
1721
1722
1723
1724
1725
1726
1727
1728
1729
1730
1731
1732
1733
1734
1735
1736
1737
1738
1739
1740
1741
1742
1743
1744
1745
1746
1747
1748
1749
1750
1751
1752
1753
1754
1755
1756
1757
1758
1759
1760
1761
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
1822
1823
1824
1825
1826
1827
1828
1829
1830
1831
1832
1833
1834
1835
1836
1837
1838
1839
1840
1841
1842
1843
1844
1845
1846
1847
1848
1849
1850
1851
1852
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862
1863
1864
1865
1866
1867
1868
1869
1870
1871
1872
1873
1874
1875
1876
1877
1878
1879
1880
1881
1882
1883
1884
1885
1886
1887
1888
1889
1890
1891
1892
1893
1894
1895
1896
1897
1898
1899
1900
1901
1902
1903
1904
1905
1906
1907
1908
1909
1910
1911
1912
1913
1914
1915
1916
1917
1918
1919
1920
1921
1922
1923
1924
1925
1926
1927
1928
1929
1930
1931
1932
1933
1934
1935
1936
1937
1938
1939
1940
1941
1942
1943
1944
1945
1946
1947
1948
1949
1950
1951
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990
1991
1992
1993
1994
1995
1996
1997
1998
1999
2000
2001
2002
2003
2004
2005
2006
2007
2008
2009
2010
2011
2012
2013
2014
2015
2016
2017
2018
2019
2020
2021
2022
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
2037
2038
2039
2040
2041
2042
2043
2044
2045
2046
2047
2048
2049
2050
2051
2052
2053
2054
2055
2056
2057
2058
2059
2060
2061
2062
2063
2064
2065
2066
2067
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
2082
2083
2084
2085
2086
2087
2088
2089
2090
2091
2092
2093
2094
2095
2096
2097
2098
2099
2100
2101
2102
2103
2104
2105
2106
2107
2108
2109
2110
2111
2112
2113
2114
2115
2116
2117
2118
2119
2120
2121
2122
2123
2124
2125
2126
2127
2128
patmaddox@beastie:~/oss/fossil-scm $ make clean
make clean
rm -rf bld/* fossil
patmaddox@beastie:~/oss/fossil-scm $ ./configure --with-th1-docs --with-th1-hooks --json --with-tcl-private-stubs --with-tcl=1
./configure --with-th1-docs --with-th1-hooks --json --with-tcl-private-stubs --with-tcl=1
Host System...x86_64-unknown-freebsd13.1
Build System...x86_64-unknown-freebsd13.1
C compiler... cc -g -O2
C++ compiler... c++ -g -O2
Build C compiler...cc
Checking for stdlib.h...ok
Checking for uint32_t...ok
Checking for uint16_t...ok
Checking for int16_t...ok
Checking for uint8_t...ok
Checking for pread...ok
Checking for cscope...no
Checking for tclsh...ok
Found Tclsh version 8.6 in the PATH.
JSON support enabled
TH1 embedded documentation support enabled
TH1 hooks support enabled
Checking libs for iconv...none needed
Checking for zlib.h...ok
Checking for inflateEnd in z...-lz
Checking for system ssl...ok
HTTPS support enabled
Using sqlite3.c from this source tree.
Found Tcl Private Stubs at /usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6
Checking libs for gethostbyname...none needed
Checking libs for socket...none needed
Checking for arpa/nameser.h...ok
Checking for bind/resolv.h...not found
Checking for resolv.h...not found
Checking libs for dn_expand...none needed
Checking libs for ns_parserr...no
Checking libs for __ns_parserr...none needed
Checking libs for res_query...none needed
Checking libs for res_9_ns_initparse...no
Checking for utime...ok
Checking for usleep...ok
Checking for strchrnul...ok
Checking for pledge...not found
Checking for backtrace...not found
Found getpass() with unistd.h
Checking for getloadavg...ok
Checking for getpassphrase...not found
Checking libs for getpass...none needed
Checking libs for sin...-lm
Checking libs for fuse_mount...no
Checking for dlopen in dl...none needed
Makefile is unchanged
autoconfig.h is unchanged
patmaddox@beastie:~/oss/fossil-scm $ make test
make test
cc -g -O2 -o bld/translate ./tools/translate.c
bld/translate ./src/add.c >bld/add_.c
bld/translate ./src/ajax.c >bld/ajax_.c
bld/translate ./src/alerts.c >bld/alerts_.c
bld/translate ./src/allrepo.c >bld/allrepo_.c
bld/translate ./src/attach.c >bld/attach_.c
bld/translate ./src/backlink.c >bld/backlink_.c
bld/translate ./src/backoffice.c >bld/backoffice_.c
bld/translate ./src/bag.c >bld/bag_.c
bld/translate ./src/bisect.c >bld/bisect_.c
bld/translate ./src/blob.c >bld/blob_.c
bld/translate ./src/branch.c >bld/branch_.c
bld/translate ./src/browse.c >bld/browse_.c
bld/translate ./src/builtin.c >bld/builtin_.c
bld/translate ./src/bundle.c >bld/bundle_.c
bld/translate ./src/cache.c >bld/cache_.c
bld/translate ./src/capabilities.c >bld/capabilities_.c
bld/translate ./src/captcha.c >bld/captcha_.c
bld/translate ./src/cgi.c >bld/cgi_.c
bld/translate ./src/chat.c >bld/chat_.c
bld/translate ./src/checkin.c >bld/checkin_.c
bld/translate ./src/checkout.c >bld/checkout_.c
bld/translate ./src/clearsign.c >bld/clearsign_.c
bld/translate ./src/clone.c >bld/clone_.c
bld/translate ./src/color.c >bld/color_.c
bld/translate ./src/comformat.c >bld/comformat_.c
bld/translate ./src/configure.c >bld/configure_.c
bld/translate ./src/content.c >bld/content_.c
bld/translate ./src/cookies.c >bld/cookies_.c
bld/translate ./src/db.c >bld/db_.c
bld/translate ./src/delta.c >bld/delta_.c
bld/translate ./src/deltacmd.c >bld/deltacmd_.c
bld/translate ./src/deltafunc.c >bld/deltafunc_.c
bld/translate ./src/descendants.c >bld/descendants_.c
bld/translate ./src/diff.c >bld/diff_.c
bld/translate ./src/diffcmd.c >bld/diffcmd_.c
bld/translate ./src/dispatch.c >bld/dispatch_.c
bld/translate ./src/doc.c >bld/doc_.c
bld/translate ./src/encode.c >bld/encode_.c
bld/translate ./src/etag.c >bld/etag_.c
bld/translate ./src/event.c >bld/event_.c
bld/translate ./src/export.c >bld/export_.c
bld/translate ./src/extcgi.c >bld/extcgi_.c
bld/translate ./src/file.c >bld/file_.c
bld/translate ./src/fileedit.c >bld/fileedit_.c
bld/translate ./src/finfo.c >bld/finfo_.c
bld/translate ./src/foci.c >bld/foci_.c
bld/translate ./src/forum.c >bld/forum_.c
bld/translate ./src/fshell.c >bld/fshell_.c
bld/translate ./src/fusefs.c >bld/fusefs_.c
bld/translate ./src/fuzz.c >bld/fuzz_.c
bld/translate ./src/glob.c >bld/glob_.c
bld/translate ./src/graph.c >bld/graph_.c
bld/translate ./src/gzip.c >bld/gzip_.c
bld/translate ./src/hname.c >bld/hname_.c
bld/translate ./src/hook.c >bld/hook_.c
bld/translate ./src/http.c >bld/http_.c
bld/translate ./src/http_socket.c >bld/http_socket_.c
bld/translate ./src/http_ssl.c >bld/http_ssl_.c
bld/translate ./src/http_transport.c >bld/http_transport_.c
bld/translate ./src/import.c >bld/import_.c
bld/translate ./src/info.c >bld/info_.c
bld/translate ./src/interwiki.c >bld/interwiki_.c
bld/translate ./src/json.c >bld/json_.c
bld/translate ./src/json_artifact.c >bld/json_artifact_.c
bld/translate ./src/json_branch.c >bld/json_branch_.c
bld/translate ./src/json_config.c >bld/json_config_.c
bld/translate ./src/json_diff.c >bld/json_diff_.c
bld/translate ./src/json_dir.c >bld/json_dir_.c
bld/translate ./src/json_finfo.c >bld/json_finfo_.c
bld/translate ./src/json_login.c >bld/json_login_.c
bld/translate ./src/json_query.c >bld/json_query_.c
bld/translate ./src/json_report.c >bld/json_report_.c
bld/translate ./src/json_status.c >bld/json_status_.c
bld/translate ./src/json_tag.c >bld/json_tag_.c
bld/translate ./src/json_timeline.c >bld/json_timeline_.c
bld/translate ./src/json_user.c >bld/json_user_.c
bld/translate ./src/json_wiki.c >bld/json_wiki_.c
bld/translate ./src/leaf.c >bld/leaf_.c
bld/translate ./src/loadctrl.c >bld/loadctrl_.c
bld/translate ./src/login.c >bld/login_.c
bld/translate ./src/lookslike.c >bld/lookslike_.c
bld/translate ./src/main.c >bld/main_.c
bld/translate ./src/manifest.c >bld/manifest_.c
bld/translate ./src/markdown.c >bld/markdown_.c
bld/translate ./src/markdown_html.c >bld/markdown_html_.c
bld/translate ./src/md5.c >bld/md5_.c
bld/translate ./src/merge.c >bld/merge_.c
bld/translate ./src/merge3.c >bld/merge3_.c
bld/translate ./src/moderate.c >bld/moderate_.c
bld/translate ./src/name.c >bld/name_.c
bld/translate ./src/patch.c >bld/patch_.c
bld/translate ./src/path.c >bld/path_.c
bld/translate ./src/piechart.c >bld/piechart_.c
bld/translate ./src/pikchrshow.c >bld/pikchrshow_.c
bld/translate ./src/pivot.c >bld/pivot_.c
bld/translate ./src/popen.c >bld/popen_.c
bld/translate ./src/pqueue.c >bld/pqueue_.c
bld/translate ./src/printf.c >bld/printf_.c
bld/translate ./src/publish.c >bld/publish_.c
bld/translate ./src/purge.c >bld/purge_.c
bld/translate ./src/rebuild.c >bld/rebuild_.c
bld/translate ./src/regexp.c >bld/regexp_.c
bld/translate ./src/repolist.c >bld/repolist_.c
bld/translate ./src/report.c >bld/report_.c
bld/translate ./src/rss.c >bld/rss_.c
bld/translate ./src/schema.c >bld/schema_.c
bld/translate ./src/search.c >bld/search_.c
bld/translate ./src/security_audit.c >bld/security_audit_.c
bld/translate ./src/setup.c >bld/setup_.c
bld/translate ./src/setupuser.c >bld/setupuser_.c
bld/translate ./src/sha1.c >bld/sha1_.c
bld/translate ./src/sha1hard.c >bld/sha1hard_.c
bld/translate ./src/sha3.c >bld/sha3_.c
bld/translate ./src/shun.c >bld/shun_.c
bld/translate ./src/sitemap.c >bld/sitemap_.c
bld/translate ./src/skins.c >bld/skins_.c
bld/translate ./src/smtp.c >bld/smtp_.c
bld/translate ./src/sqlcmd.c >bld/sqlcmd_.c
bld/translate ./src/stash.c >bld/stash_.c
bld/translate ./src/stat.c >bld/stat_.c
bld/translate ./src/statrep.c >bld/statrep_.c
bld/translate ./src/style.c >bld/style_.c
bld/translate ./src/sync.c >bld/sync_.c
bld/translate ./src/tag.c >bld/tag_.c
bld/translate ./src/tar.c >bld/tar_.c
bld/translate ./src/terminal.c >bld/terminal_.c
bld/translate ./src/th_main.c >bld/th_main_.c
bld/translate ./src/timeline.c >bld/timeline_.c
bld/translate ./src/tkt.c >bld/tkt_.c
bld/translate ./src/tktsetup.c >bld/tktsetup_.c
bld/translate ./src/undo.c >bld/undo_.c
bld/translate ./src/unicode.c >bld/unicode_.c
bld/translate ./src/unversioned.c >bld/unversioned_.c
bld/translate ./src/update.c >bld/update_.c
bld/translate ./src/url.c >bld/url_.c
bld/translate ./src/user.c >bld/user_.c
bld/translate ./src/utf8.c >bld/utf8_.c
bld/translate ./src/util.c >bld/util_.c
bld/translate ./src/verify.c >bld/verify_.c
bld/translate ./src/vfile.c >bld/vfile_.c
bld/translate ./src/wiki.c >bld/wiki_.c
bld/translate ./src/wikiformat.c >bld/wikiformat_.c
bld/translate ./src/winfile.c >bld/winfile_.c
bld/translate ./src/winhttp.c >bld/winhttp_.c
bld/translate ./src/xfer.c >bld/xfer_.c
bld/translate ./src/xfersetup.c >bld/xfersetup_.c
bld/translate ./src/zip.c >bld/zip_.c
cc -g -O2 -o bld/mkindex ./tools/mkindex.c
bld/mkindex bld/add_.c  bld/ajax_.c  bld/alerts_.c  bld/allrepo_.c  bld/attach_.c  bld/backlink_.c  bld/backoffice_.c  bld/bag_.c  bld/bisect_.c  bld/blob_.c  bld/branch_.c  bld/browse_.c  bld/builtin_.c  bld/bundle_.c  bld/cache_.c  bld/capabilities_.c  bld/captcha_.c  bld/cgi_.c  bld/chat_.c  bld/checkin_.c  bld/checkout_.c  bld/clearsign_.c  bld/clone_.c  bld/color_.c  bld/comformat_.c  bld/configure_.c  bld/content_.c  bld/cookies_.c  bld/db_.c  bld/delta_.c  bld/deltacmd_.c  bld/deltafunc_.c  bld/descendants_.c  bld/diff_.c  bld/diffcmd_.c  bld/dispatch_.c  bld/doc_.c  bld/encode_.c  bld/etag_.c  bld/event_.c  bld/export_.c  bld/extcgi_.c  bld/file_.c  bld/fileedit_.c  bld/finfo_.c  bld/foci_.c  bld/forum_.c  bld/fshell_.c  bld/fusefs_.c  bld/fuzz_.c  bld/glob_.c  bld/graph_.c  bld/gzip_.c  bld/hname_.c  bld/hook_.c  bld/http_.c  bld/http_socket_.c  bld/http_ssl_.c  bld/http_transport_.c  bld/import_.c  bld/info_.c  bld/interwiki_.c  bld/json_.c  bld/json_artifact_.c  bld/json_branch_.c  bld/json_config_.c  bld/json_diff_.c  bld/json_dir_.c  bld/json_finfo_.c  bld/json_login_.c  bld/json_query_.c  bld/json_report_.c  bld/json_status_.c  bld/json_tag_.c  bld/json_timeline_.c  bld/json_user_.c  bld/json_wiki_.c  bld/leaf_.c  bld/loadctrl_.c  bld/login_.c  bld/lookslike_.c  bld/main_.c  bld/manifest_.c  bld/markdown_.c  bld/markdown_html_.c  bld/md5_.c  bld/merge_.c  bld/merge3_.c  bld/moderate_.c  bld/name_.c  bld/patch_.c  bld/path_.c  bld/piechart_.c  bld/pikchrshow_.c  bld/pivot_.c  bld/popen_.c  bld/pqueue_.c  bld/printf_.c  bld/publish_.c  bld/purge_.c  bld/rebuild_.c  bld/regexp_.c  bld/repolist_.c  bld/report_.c  bld/rss_.c  bld/schema_.c  bld/search_.c  bld/security_audit_.c  bld/setup_.c  bld/setupuser_.c  bld/sha1_.c  bld/sha1hard_.c  bld/sha3_.c  bld/shun_.c  bld/sitemap_.c  bld/skins_.c  bld/smtp_.c  bld/sqlcmd_.c  bld/stash_.c  bld/stat_.c  bld/statrep_.c  bld/style_.c  bld/sync_.c  bld/tag_.c  bld/tar_.c  bld/terminal_.c  bld/th_main_.c  
bld/timeline_.c  bld/tkt_.c  bld/tktsetup_.c  bld/undo_.c  bld/unicode_.c  bld/unversioned_.c  bld/update_.c  bld/url_.c  bld/user_.c  bld/utf8_.c  bld/util_.c  bld/verify_.c  bld/vfile_.c  bld/wiki_.c  bld/wikiformat_.c  bld/winfile_.c  bld/winhttp_.c  bld/xfer_.c  bld/xfersetup_.c  bld/zip_.c >bld/page_index.h
cc -g -O2 -o bld/mkbuiltin ./tools/mkbuiltin.c
bld/mkbuiltin --prefix ./src/ ./src/../extsrc/pikchr-worker.js  ./src/../extsrc/pikchr.js  ./src/../extsrc/pikchr.wasm  ./src/../skins/ardoise/css.txt  ./src/../skins/ardoise/details.txt  ./src/../skins/ardoise/footer.txt  ./src/../skins/ardoise/header.txt  ./src/../skins/black_and_white/css.txt  ./src/../skins/black_and_white/details.txt  ./src/../skins/black_and_white/footer.txt  ./src/../skins/black_and_white/header.txt  ./src/../skins/blitz/css.txt  ./src/../skins/blitz/details.txt  ./src/../skins/blitz/footer.txt  ./src/../skins/blitz/header.txt  ./src/../skins/blitz/ticket.txt  ./src/../skins/darkmode/css.txt  ./src/../skins/darkmode/details.txt  ./src/../skins/darkmode/footer.txt  ./src/../skins/darkmode/header.txt  ./src/../skins/default/css.txt  ./src/../skins/default/details.txt  ./src/../skins/default/footer.txt  ./src/../skins/default/header.txt  ./src/../skins/eagle/css.txt  ./src/../skins/eagle/details.txt  ./src/../skins/eagle/footer.txt  ./src/../skins/eagle/header.txt  ./src/../skins/khaki/css.txt  ./src/../skins/khaki/details.txt  ./src/../skins/khaki/footer.txt  ./src/../skins/khaki/header.txt  ./src/../skins/original/css.txt  ./src/../skins/original/details.txt  ./src/../skins/original/footer.txt  ./src/../skins/original/header.txt  ./src/../skins/plain_gray/css.txt  ./src/../skins/plain_gray/details.txt  ./src/../skins/plain_gray/footer.txt  ./src/../skins/plain_gray/header.txt  ./src/../skins/xekri/css.txt  ./src/../skins/xekri/details.txt  ./src/../skins/xekri/footer.txt  ./src/../skins/xekri/header.txt  ./src/accordion.js  ./src/alerts/bflat2.wav  ./src/alerts/bflat3.wav  ./src/alerts/bloop.wav  ./src/alerts/plunk.wav  ./src/ci_edit.js  ./src/copybtn.js  ./src/default.css  ./src/diff.js  ./src/diff.tcl  ./src/forum.js  ./src/fossil.bootstrap.js  ./src/fossil.confirmer.js  ./src/fossil.copybutton.js  ./src/fossil.diff.js  ./src/fossil.dom.js  ./src/fossil.fetch.js  ./src/fossil.numbered-lines.js  ./src/fossil.page.brlist.js  
./src/fossil.page.chat.js  ./src/fossil.page.fileedit.js  ./src/fossil.page.forumpost.js  ./src/fossil.page.pikchrshow.js  ./src/fossil.page.pikchrshowasm.js  ./src/fossil.page.whistory.js  ./src/fossil.page.wikiedit.js  ./src/fossil.pikchr.js  ./src/fossil.popupwidget.js  ./src/fossil.storage.js  ./src/fossil.tabs.js  ./src/fossil.wikiedit-wysiwyg.js  ./src/graph.js  ./src/hbmenu.js  ./src/href.js  ./src/login.js  ./src/markdown.md  ./src/menu.js  ./src/scroll.js  ./src/skin.js  ./src/sorttable.js  ./src/sounds/0.wav  ./src/sounds/1.wav  ./src/sounds/2.wav  ./src/sounds/3.wav  ./src/sounds/4.wav  ./src/sounds/5.wav  ./src/sounds/6.wav  ./src/sounds/7.wav  ./src/sounds/8.wav  ./src/sounds/9.wav  ./src/sounds/a.wav  ./src/sounds/b.wav  ./src/sounds/c.wav  ./src/sounds/d.wav  ./src/sounds/e.wav  ./src/sounds/f.wav  ./src/style.admin_log.css  ./src/style.chat.css  ./src/style.fileedit.css  ./src/style.pikchrshow.css  ./src/style.wikiedit.css  ./src/tree.js  ./src/useredit.js  ./src/wiki.wiki >bld/builtin_data.h
cc -g -O2 -o bld/makeheaders ./tools/makeheaders.c
cc -g -O2 -o bld/mkversion ./tools/mkversion.c
bld/mkversion ./src/../manifest.uuid  ./src/../manifest  ./src/../VERSION >bld/VERSION.h
bld/makeheaders bld/add_.c:bld/add.h  bld/ajax_.c:bld/ajax.h  bld/alerts_.c:bld/alerts.h  bld/allrepo_.c:bld/allrepo.h  bld/attach_.c:bld/attach.h  bld/backlink_.c:bld/backlink.h  bld/backoffice_.c:bld/backoffice.h  bld/bag_.c:bld/bag.h  bld/bisect_.c:bld/bisect.h  bld/blob_.c:bld/blob.h  bld/branch_.c:bld/branch.h  bld/browse_.c:bld/browse.h  bld/builtin_.c:bld/builtin.h  bld/bundle_.c:bld/bundle.h  bld/cache_.c:bld/cache.h  bld/capabilities_.c:bld/capabilities.h  bld/captcha_.c:bld/captcha.h  bld/cgi_.c:bld/cgi.h  bld/chat_.c:bld/chat.h  bld/checkin_.c:bld/checkin.h  bld/checkout_.c:bld/checkout.h  bld/clearsign_.c:bld/clearsign.h  bld/clone_.c:bld/clone.h  bld/color_.c:bld/color.h  bld/comformat_.c:bld/comformat.h  bld/configure_.c:bld/configure.h  bld/content_.c:bld/content.h  bld/cookies_.c:bld/cookies.h  bld/db_.c:bld/db.h  bld/delta_.c:bld/delta.h  bld/deltacmd_.c:bld/deltacmd.h  bld/deltafunc_.c:bld/deltafunc.h  bld/descendants_.c:bld/descendants.h  bld/diff_.c:bld/diff.h  bld/diffcmd_.c:bld/diffcmd.h  bld/dispatch_.c:bld/dispatch.h  bld/doc_.c:bld/doc.h  bld/encode_.c:bld/encode.h  bld/etag_.c:bld/etag.h  bld/event_.c:bld/event.h  bld/export_.c:bld/export.h  bld/extcgi_.c:bld/extcgi.h  bld/file_.c:bld/file.h  bld/fileedit_.c:bld/fileedit.h  bld/finfo_.c:bld/finfo.h  bld/foci_.c:bld/foci.h  bld/forum_.c:bld/forum.h  bld/fshell_.c:bld/fshell.h  bld/fusefs_.c:bld/fusefs.h  bld/fuzz_.c:bld/fuzz.h  bld/glob_.c:bld/glob.h  bld/graph_.c:bld/graph.h  bld/gzip_.c:bld/gzip.h  bld/hname_.c:bld/hname.h  bld/hook_.c:bld/hook.h  bld/http_.c:bld/http.h  bld/http_socket_.c:bld/http_socket.h  bld/http_ssl_.c:bld/http_ssl.h  bld/http_transport_.c:bld/http_transport.h  bld/import_.c:bld/import.h  bld/info_.c:bld/info.h  bld/interwiki_.c:bld/interwiki.h  bld/json_.c:bld/json.h  bld/json_artifact_.c:bld/json_artifact.h  bld/json_branch_.c:bld/json_branch.h  bld/json_config_.c:bld/json_config.h  bld/json_diff_.c:bld/json_diff.h  bld/json_dir_.c:bld/json_dir.h  
bld/json_finfo_.c:bld/json_finfo.h  bld/json_login_.c:bld/json_login.h  bld/json_query_.c:bld/json_query.h  bld/json_report_.c:bld/json_report.h  bld/json_status_.c:bld/json_status.h  bld/json_tag_.c:bld/json_tag.h  bld/json_timeline_.c:bld/json_timeline.h  bld/json_user_.c:bld/json_user.h  bld/json_wiki_.c:bld/json_wiki.h  bld/leaf_.c:bld/leaf.h  bld/loadctrl_.c:bld/loadctrl.h  bld/login_.c:bld/login.h  bld/lookslike_.c:bld/lookslike.h  bld/main_.c:bld/main.h  bld/manifest_.c:bld/manifest.h  bld/markdown_.c:bld/markdown.h  bld/markdown_html_.c:bld/markdown_html.h  bld/md5_.c:bld/md5.h  bld/merge_.c:bld/merge.h  bld/merge3_.c:bld/merge3.h  bld/moderate_.c:bld/moderate.h  bld/name_.c:bld/name.h  bld/patch_.c:bld/patch.h  bld/path_.c:bld/path.h  bld/piechart_.c:bld/piechart.h  bld/pikchrshow_.c:bld/pikchrshow.h  bld/pivot_.c:bld/pivot.h  bld/popen_.c:bld/popen.h  bld/pqueue_.c:bld/pqueue.h  bld/printf_.c:bld/printf.h  bld/publish_.c:bld/publish.h  bld/purge_.c:bld/purge.h  bld/rebuild_.c:bld/rebuild.h  bld/regexp_.c:bld/regexp.h  bld/repolist_.c:bld/repolist.h  bld/report_.c:bld/report.h  bld/rss_.c:bld/rss.h  bld/schema_.c:bld/schema.h  bld/search_.c:bld/search.h  bld/security_audit_.c:bld/security_audit.h  bld/setup_.c:bld/setup.h  bld/setupuser_.c:bld/setupuser.h  bld/sha1_.c:bld/sha1.h  bld/sha1hard_.c:bld/sha1hard.h  bld/sha3_.c:bld/sha3.h  bld/shun_.c:bld/shun.h  bld/sitemap_.c:bld/sitemap.h  bld/skins_.c:bld/skins.h  bld/smtp_.c:bld/smtp.h  bld/sqlcmd_.c:bld/sqlcmd.h  bld/stash_.c:bld/stash.h  bld/stat_.c:bld/stat.h  bld/statrep_.c:bld/statrep.h  bld/style_.c:bld/style.h  bld/sync_.c:bld/sync.h  bld/tag_.c:bld/tag.h  bld/tar_.c:bld/tar.h  bld/terminal_.c:bld/terminal.h  bld/th_main_.c:bld/th_main.h  bld/timeline_.c:bld/timeline.h  bld/tkt_.c:bld/tkt.h  bld/tktsetup_.c:bld/tktsetup.h  bld/undo_.c:bld/undo.h  bld/unicode_.c:bld/unicode.h  bld/unversioned_.c:bld/unversioned.h  bld/update_.c:bld/update.h  bld/url_.c:bld/url.h  bld/user_.c:bld/user.h  
bld/utf8_.c:bld/utf8.h  bld/util_.c:bld/util.h  bld/verify_.c:bld/verify.h  bld/vfile_.c:bld/vfile.h  bld/wiki_.c:bld/wiki.h  bld/wikiformat_.c:bld/wikiformat.h  bld/winfile_.c:bld/winfile.h  bld/winhttp_.c:bld/winhttp.h  bld/xfer_.c:bld/xfer.h  bld/xfersetup_.c:bld/xfersetup.h  bld/zip_.c:bld/zip.h  ./extsrc/pikchr.c:bld/pikchr.h  ./extsrc/sqlite3.h  ./src/th.h  bld/VERSION.h
touch bld/headers
cc -g -O2 -o bld/codecheck1 ./tools/codecheck1.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -DNDEBUG=1  -DSQLITE_DQS=0  -DSQLITE_THREADSAFE=0  -DSQLITE_DEFAULT_MEMSTATUS=0  -DSQLITE_DEFAULT_WAL_SYNCHRONOUS=1  -DSQLITE_LIKE_DOESNT_MATCH_BLOBS  -DSQLITE_OMIT_DECLTYPE  -DSQLITE_OMIT_DEPRECATED  -DSQLITE_OMIT_PROGRESS_CALLBACK  -DSQLITE_OMIT_SHARED_CACHE  -DSQLITE_OMIT_LOAD_EXTENSION  -DSQLITE_MAX_EXPR_DEPTH=0  -DSQLITE_ENABLE_LOCKING_STYLE=0  -DSQLITE_DEFAULT_FILE_FORMAT=4  -DSQLITE_ENABLE_EXPLAIN_COMMENTS  -DSQLITE_ENABLE_FTS4  -DSQLITE_ENABLE_DBSTAT_VTAB  -DSQLITE_ENABLE_FTS5  -DSQLITE_ENABLE_STMTVTAB  -DSQLITE_HAVE_ZLIB  -DSQLITE_ENABLE_DBPAGE_VTAB  -DSQLITE_TRUSTED_SCHEMA=0  -DHAVE_USLEEP    -c ./extsrc/sqlite3.c -o bld/sqlite3.o
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -c ./extsrc/linenoise.c -o bld/linenoise.o
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -DPIKCHR_TOKEN_LIMIT=10000 -c ./extsrc/pikchr.c -o bld/pikchr.o
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -DNDEBUG=1  -DSQLITE_DQS=0  -DSQLITE_THREADSAFE=0  -DSQLITE_DEFAULT_MEMSTATUS=0  -DSQLITE_DEFAULT_WAL_SYNCHRONOUS=1  -DSQLITE_LIKE_DOESNT_MATCH_BLOBS  -DSQLITE_OMIT_DECLTYPE  -DSQLITE_OMIT_DEPRECATED  -DSQLITE_OMIT_PROGRESS_CALLBACK  -DSQLITE_OMIT_SHARED_CACHE  -DSQLITE_OMIT_LOAD_EXTENSION  -DSQLITE_MAX_EXPR_DEPTH=0  -DSQLITE_ENABLE_LOCKING_STYLE=0  -DSQLITE_DEFAULT_FILE_FORMAT=4  -DSQLITE_ENABLE_EXPLAIN_COMMENTS  -DSQLITE_ENABLE_FTS4  -DSQLITE_ENABLE_DBSTAT_VTAB  -DSQLITE_ENABLE_FTS5  -DSQLITE_ENABLE_STMTVTAB  -DSQLITE_HAVE_ZLIB  -DSQLITE_ENABLE_DBPAGE_VTAB  -DSQLITE_TRUSTED_SCHEMA=0  -DHAVE_USLEEP  -Dmain=sqlite3_shell  -DSQLITE_SHELL_IS_UTF8=1  -DSQLITE_OMIT_LOAD_EXTENSION=1  -DUSE_SYSTEM_SQLITE=0  -DSQLITE_SHELL_DBNAME_PROC=sqlcmd_get_dbname  -DSQLITE_SHELL_INIT_PROC=sqlcmd_init_proc   -DHAVE_LINENOISE -c ./extsrc/shell.c -o bld/shell.o
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -c ./src/th.c -o bld/th.o
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -c ./src/th_lang.c -o bld/th_lang.o
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -c ./src/th_tcl.c -o bld/th_tcl.o
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -c ./extsrc/cson_amalgamation.c -o bld/cson_amalgamation.o
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/add.o -c bld/add_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/ajax.o -c bld/ajax_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/alerts.o -c bld/alerts_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/allrepo.o -c bld/allrepo_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/attach.o -c bld/attach_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/backlink.o -c bld/backlink_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/backoffice.o -c bld/backoffice_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/bag.o -c bld/bag_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/bisect.o -c bld/bisect_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/blob.o -c bld/blob_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/branch.o -c bld/branch_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/browse.o -c bld/browse_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/builtin.o -c bld/builtin_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/bundle.o -c bld/bundle_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/cache.o -c bld/cache_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/capabilities.o -c bld/capabilities_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/captcha.o -c bld/captcha_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/cgi.o -c bld/cgi_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/chat.o -c bld/chat_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/checkin.o -c bld/checkin_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/checkout.o -c bld/checkout_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/clearsign.o -c bld/clearsign_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/clone.o -c bld/clone_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/color.o -c bld/color_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/comformat.o -c bld/comformat_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/configure.o -c bld/configure_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/content.o -c bld/content_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/cookies.o -c bld/cookies_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/db.o -c bld/db_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/delta.o -c bld/delta_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/deltacmd.o -c bld/deltacmd_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/deltafunc.o -c bld/deltafunc_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/descendants.o -c bld/descendants_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/diff.o -c bld/diff_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/diffcmd.o -c bld/diffcmd_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/dispatch.o -c bld/dispatch_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/doc.o -c bld/doc_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/encode.o -c bld/encode_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/etag.o -c bld/etag_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/event.o -c bld/event_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/export.o -c bld/export_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/extcgi.o -c bld/extcgi_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/file.o -c bld/file_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/fileedit.o -c bld/fileedit_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/finfo.o -c bld/finfo_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/foci.o -c bld/foci_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/forum.o -c bld/forum_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/fshell.o -c bld/fshell_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/fusefs.o -c bld/fusefs_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/fuzz.o -c bld/fuzz_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/glob.o -c bld/glob_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/graph.o -c bld/graph_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/gzip.o -c bld/gzip_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/hname.o -c bld/hname_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/hook.o -c bld/hook_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/http.o -c bld/http_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/http_socket.o -c bld/http_socket_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/http_ssl.o -c bld/http_ssl_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/http_transport.o -c bld/http_transport_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/import.o -c bld/import_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/info.o -c bld/info_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/interwiki.o -c bld/interwiki_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json.o -c bld/json_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_artifact.o -c bld/json_artifact_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_branch.o -c bld/json_branch_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_config.o -c bld/json_config_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_diff.o -c bld/json_diff_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_dir.o -c bld/json_dir_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_finfo.o -c bld/json_finfo_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_login.o -c bld/json_login_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_query.o -c bld/json_query_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_report.o -c bld/json_report_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_status.o -c bld/json_status_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_tag.o -c bld/json_tag_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_timeline.o -c bld/json_timeline_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_user.o -c bld/json_user_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/json_wiki.o -c bld/json_wiki_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/leaf.o -c bld/leaf_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/loadctrl.o -c bld/loadctrl_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/login.o -c bld/login_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/lookslike.o -c bld/lookslike_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/main.o -c bld/main_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/manifest.o -c bld/manifest_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/markdown.o -c bld/markdown_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/markdown_html.o -c bld/markdown_html_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/md5.o -c bld/md5_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/merge.o -c bld/merge_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/merge3.o -c bld/merge3_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/moderate.o -c bld/moderate_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/name.o -c bld/name_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/patch.o -c bld/patch_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/path.o -c bld/path_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/piechart.o -c bld/piechart_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/pikchrshow.o -c bld/pikchrshow_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/pivot.o -c bld/pivot_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/popen.o -c bld/popen_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/pqueue.o -c bld/pqueue_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/printf.o -c bld/printf_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/publish.o -c bld/publish_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/purge.o -c bld/purge_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/rebuild.o -c bld/rebuild_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/regexp.o -c bld/regexp_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/repolist.o -c bld/repolist_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/report.o -c bld/report_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/rss.o -c bld/rss_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/schema.o -c bld/schema_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/search.o -c bld/search_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/security_audit.o -c bld/security_audit_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/setup.o -c bld/setup_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/setupuser.o -c bld/setupuser_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/sha1.o -c bld/sha1_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/sha1hard.o -c bld/sha1hard_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/sha3.o -c bld/sha3_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/shun.o -c bld/shun_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/sitemap.o -c bld/sitemap_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/skins.o -c bld/skins_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/smtp.o -c bld/smtp_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/sqlcmd.o -c bld/sqlcmd_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/stash.o -c bld/stash_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/stat.o -c bld/stat_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/statrep.o -c bld/statrep_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/style.o -c bld/style_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/sync.o -c bld/sync_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/tag.o -c bld/tag_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/tar.o -c bld/tar_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/terminal.o -c bld/terminal_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/th_main.o -c bld/th_main_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/timeline.o -c bld/timeline_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/tkt.o -c bld/tkt_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/tktsetup.o -c bld/tktsetup_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/undo.o -c bld/undo_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/unicode.o -c bld/unicode_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/unversioned.o -c bld/unversioned_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/update.o -c bld/update_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/url.o -c bld/url_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/user.o -c bld/user_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/utf8.o -c bld/utf8_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/util.o -c bld/util_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/verify.o -c bld/verify_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/vfile.o -c bld/vfile_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/wiki.o -c bld/wiki_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/wikiformat.o -c bld/wikiformat_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/winfile.o -c bld/winfile_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/winhttp.o -c bld/winhttp_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/xfer.o -c bld/xfer_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/xfersetup.o -c bld/xfersetup_.c
cc -I. -I./src -I./extsrc -Ibld -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o bld/zip.o -c bld/zip_.c
bld/codecheck1 bld/add_.c  bld/ajax_.c  bld/alerts_.c  bld/allrepo_.c  bld/attach_.c  bld/backlink_.c  bld/backoffice_.c  bld/bag_.c  bld/bisect_.c  bld/blob_.c  bld/branch_.c  bld/browse_.c  bld/builtin_.c  bld/bundle_.c  bld/cache_.c  bld/capabilities_.c  bld/captcha_.c  bld/cgi_.c  bld/chat_.c  bld/checkin_.c  bld/checkout_.c  bld/clearsign_.c  bld/clone_.c  bld/color_.c  bld/comformat_.c  bld/configure_.c  bld/content_.c  bld/cookies_.c  bld/db_.c  bld/delta_.c  bld/deltacmd_.c  bld/deltafunc_.c  bld/descendants_.c  bld/diff_.c  bld/diffcmd_.c  bld/dispatch_.c  bld/doc_.c  bld/encode_.c  bld/etag_.c  bld/event_.c  bld/export_.c  bld/extcgi_.c  bld/file_.c  bld/fileedit_.c  bld/finfo_.c  bld/foci_.c  bld/forum_.c  bld/fshell_.c  bld/fusefs_.c  bld/fuzz_.c  bld/glob_.c  bld/graph_.c  bld/gzip_.c  bld/hname_.c  bld/hook_.c  bld/http_.c  bld/http_socket_.c  bld/http_ssl_.c  bld/http_transport_.c  bld/import_.c  bld/info_.c  bld/interwiki_.c  bld/json_.c  bld/json_artifact_.c  bld/json_branch_.c  bld/json_config_.c  bld/json_diff_.c  bld/json_dir_.c  bld/json_finfo_.c  bld/json_login_.c  bld/json_query_.c  bld/json_report_.c  bld/json_status_.c  bld/json_tag_.c  bld/json_timeline_.c  bld/json_user_.c  bld/json_wiki_.c  bld/leaf_.c  bld/loadctrl_.c  bld/login_.c  bld/lookslike_.c  bld/main_.c  bld/manifest_.c  bld/markdown_.c  bld/markdown_html_.c  bld/md5_.c  bld/merge_.c  bld/merge3_.c  bld/moderate_.c  bld/name_.c  bld/patch_.c  bld/path_.c  bld/piechart_.c  bld/pikchrshow_.c  bld/pivot_.c  bld/popen_.c  bld/pqueue_.c  bld/printf_.c  bld/publish_.c  bld/purge_.c  bld/rebuild_.c  bld/regexp_.c  bld/repolist_.c  bld/report_.c  bld/rss_.c  bld/schema_.c  bld/search_.c  bld/security_audit_.c  bld/setup_.c  bld/setupuser_.c  bld/sha1_.c  bld/sha1hard_.c  bld/sha3_.c  bld/shun_.c  bld/sitemap_.c  bld/skins_.c  bld/smtp_.c  bld/sqlcmd_.c  bld/stash_.c  bld/stat_.c  bld/statrep_.c  bld/style_.c  bld/sync_.c  bld/tag_.c  bld/tar_.c  bld/terminal_.c  bld/th_main_.c  
bld/timeline_.c  bld/tkt_.c  bld/tktsetup_.c  bld/undo_.c  bld/unicode_.c  bld/unversioned_.c  bld/update_.c  bld/url_.c  bld/user_.c  bld/utf8_.c  bld/util_.c  bld/verify_.c  bld/vfile_.c  bld/wiki_.c  bld/wikiformat_.c  bld/winfile_.c  bld/winhttp_.c  bld/xfer_.c  bld/xfersetup_.c  bld/zip_.c
cc -Wall -Wdeclaration-after-statement -DFOSSIL_ENABLE_JSON -DFOSSIL_ENABLE_TH1_DOCS -DFOSSIL_ENABLE_TH1_HOOKS -DFOSSIL_DYNAMIC_BUILD=1 -I/usr/home/patmaddox/oss/fossil-scm/compat/tcl-8.6/generic  -g -O2 -DHAVE_AUTOCONFIG_H -o fossil bld/sqlite3.o  bld/linenoise.o  bld/pikchr.o  bld/shell.o  bld/th.o  bld/th_lang.o  bld/th_tcl.o  bld/cson_amalgamation.o bld/add.o  bld/ajax.o  bld/alerts.o  bld/allrepo.o  bld/attach.o  bld/backlink.o  bld/backoffice.o  bld/bag.o  bld/bisect.o  bld/blob.o  bld/branch.o  bld/browse.o  bld/builtin.o  bld/bundle.o  bld/cache.o  bld/capabilities.o  bld/captcha.o  bld/cgi.o  bld/chat.o  bld/checkin.o  bld/checkout.o  bld/clearsign.o  bld/clone.o  bld/color.o  bld/comformat.o  bld/configure.o  bld/content.o  bld/cookies.o  bld/db.o  bld/delta.o  bld/deltacmd.o  bld/deltafunc.o  bld/descendants.o  bld/diff.o  bld/diffcmd.o  bld/dispatch.o  bld/doc.o  bld/encode.o  bld/etag.o  bld/event.o  bld/export.o  bld/extcgi.o  bld/file.o  bld/fileedit.o  bld/finfo.o  bld/foci.o  bld/forum.o  bld/fshell.o  bld/fusefs.o  bld/fuzz.o  bld/glob.o  bld/graph.o  bld/gzip.o  bld/hname.o  bld/hook.o  bld/http.o  bld/http_socket.o  bld/http_ssl.o  bld/http_transport.o  bld/import.o  bld/info.o  bld/interwiki.o  bld/json.o  bld/json_artifact.o  bld/json_branch.o  bld/json_config.o  bld/json_diff.o  bld/json_dir.o  bld/json_finfo.o  bld/json_login.o  bld/json_query.o  bld/json_report.o  bld/json_status.o  bld/json_tag.o  bld/json_timeline.o  bld/json_user.o  bld/json_wiki.o  bld/leaf.o  bld/loadctrl.o  bld/login.o  bld/lookslike.o  bld/main.o  bld/manifest.o  bld/markdown.o  bld/markdown_html.o  bld/md5.o  bld/merge.o  bld/merge3.o  bld/moderate.o  bld/name.o  bld/patch.o  bld/path.o  bld/piechart.o  bld/pikchrshow.o  bld/pivot.o  bld/popen.o  bld/pqueue.o  bld/printf.o  bld/publish.o  bld/purge.o  bld/rebuild.o  bld/regexp.o  bld/repolist.o  bld/report.o  bld/rss.o  bld/schema.o  bld/search.o  bld/security_audit.o  bld/setup.o  bld/setupuser.o  bld/sha1.o  
bld/sha1hard.o  bld/sha3.o  bld/shun.o  bld/sitemap.o  bld/skins.o  bld/smtp.o  bld/sqlcmd.o  bld/stash.o  bld/stat.o  bld/statrep.o  bld/style.o  bld/sync.o  bld/tag.o  bld/tar.o  bld/terminal.o  bld/th_main.o  bld/timeline.o  bld/tkt.o  bld/tktsetup.o  bld/undo.o  bld/unicode.o  bld/unversioned.o  bld/update.o  bld/url.o  bld/user.o  bld/utf8.o  bld/util.o  bld/verify.o  bld/vfile.o  bld/wiki.o  bld/wikiformat.o  bld/winfile.o  bld/winhttp.o  bld/xfer.o  bld/xfersetup.o  bld/zip.o -lm -lssl -lcrypto -lz
tclsh ./src/../test/tester.tcl fossil -quiet
test pre-commit-warnings-fossil-1 FAILED!
RESULT: 1	compat/zlib/contrib/blast/test.pk	binary data
1	compat/zlib/contrib/dotzlib/DotZLib.build	CR/LF line endings
1	compat/zlib/contrib/dotzlib/DotZLib.chm	binary data
1	compat/zlib/contrib/dotzlib/DotZLib.sln	CR/LF line endings
1	compat/zlib/contrib/dotzlib/DotZLib/AssemblyInfo.cs	CR/LF line endings
1	compat/zlib/contrib/dotzlib/DotZLib/ChecksumImpl.cs	invalid UTF-8
1	compat/zlib/contrib/dotzlib/DotZLib/CircularBuffer.cs	invalid UTF-8
1	compat/zlib/contrib/dotzlib/DotZLib/CodecBase.cs	invalid UTF-8
1	compat/zlib/contrib/dotzlib/DotZLib/Deflater.cs	invalid UTF-8
1	compat/zlib/contrib/dotzlib/DotZLib/DotZLib.cs	invalid UTF-8
1	compat/zlib/contrib/dotzlib/DotZLib/DotZLib.csproj	CR/LF line endings
1	compat/zlib/contrib/dotzlib/DotZLib/GZipStream.cs	invalid UTF-8
1	compat/zlib/contrib/dotzlib/DotZLib/Inflater.cs	invalid UTF-8
1	compat/zlib/contrib/dotzlib/DotZLib/UnitTests.cs	CR/LF line endings
1	compat/zlib/contrib/dotzlib/LICENSE_1_0.txt	CR/LF line endings
1	compat/zlib/contrib/dotzlib/readme.txt	CR/LF line endings
1	compat/zlib/contrib/gcc_gvmat64/gvmat64.S	CR/LF line endings
1	compat/zlib/contrib/puff/zeros.raw	binary data
1	compat/zlib/contrib/testzlib/testzlib.c	CR/LF line endings
1	compat/zlib/contrib/testzlib/testzlib.txt	CR/LF line endings
1	compat/zlib/contrib/vstudio/readme.txt	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/miniunz.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/miniunz.vcxproj.filters	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/minizip.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/minizip.vcxproj.filters	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/testzlib.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/testzlib.vcxproj.filters	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/testzlibdll.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/testzlibdll.vcxproj.filters	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/zlib.rc	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/zlibstat.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/zlibstat.vcxproj.filters	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/zlibvc.def	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/zlibvc.sln	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/zlibvc.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc10/zlibvc.vcxproj.filters	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc11/miniunz.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc11/minizip.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc11/testzlib.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc11/testzlibdll.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc11/zlib.rc	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc11/zlibstat.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc11/zlibvc.def	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc11/zlibvc.sln	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc11/zlibvc.vcxproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc12/zlibvc.def	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc14/zlibvc.def	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc9/miniunz.vcproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc9/minizip.vcproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc9/testzlib.vcproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc9/testzlibdll.vcproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc9/zlib.rc	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc9/zlibstat.vcproj	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc9/zlibvc.def	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc9/zlibvc.sln	CR/LF line endings
1	compat/zlib/contrib/vstudio/vc9/zlibvc.vcproj	CR/LF line endings
1	compat/zlib/win32/zlib.def	CR/LF line endings
1	compat/zlib/zlib.3.pdf	binary data
1	compat/zlib/zlib.map	CR/LF line endings
1	extsrc/pikchr.wasm	binary data
1	skins/blitz/arrow_project.png	binary data
1	skins/blitz/dir.png	binary data
1	skins/blitz/file.png	binary data
1	skins/blitz/fossil_100.png	binary data
1	skins/blitz/fossil_80_reversed_darkcyan.png	binary data
1	skins/blitz/fossil_80_reversed_darkcyan_text.png	binary data
1	skins/blitz/rss_20.png	binary data
1	src/alerts/bflat2.wav	binary data
1	src/alerts/bflat3.wav	binary data
1	src/alerts/bloop.wav	binary data
1	src/alerts/plunk.wav	binary data
1	src/sounds/0.wav	binary data
1	src/sounds/1.wav	binary data
1	src/sounds/2.wav	binary data
1	src/sounds/3.wav	binary data
1	src/sounds/4.wav	binary data
1	src/sounds/5.wav	binary data
1	src/sounds/6.wav	binary data
1	src/sounds/7.wav	binary data
1	src/sounds/8.wav	binary data
1	src/sounds/9.wav	binary data
1	src/sounds/a.wav	binary data
1	src/sounds/b.wav	binary data
1	src/sounds/c.wav	binary data
1	src/sounds/d.wav	binary data
1	src/sounds/e.wav	binary data
1	src/sounds/f.wav	binary data
1	test/th1-docs-input.txt	CR/LF line endings
1	test/th1-hooks-input.txt	CR/LF line endings
1	test/utf16be.txt	Unicode
1	test/utf16le.txt	Unicode
1	win/buildmsvc.bat	CR/LF line endings
1	win/fossil.ico	binary data
1	win/fossil.rc	invalid UTF-8
1	www/apple-touch-icon.png	binary data
1	www/background.jpg	binary data
1	www/build-icons/linux.gif	binary data
1	www/build-icons/linux64.gif	binary data
1	www/build-icons/mac.gif	binary data
1	www/build-icons/openbsd.gif	binary data
1	www/build-icons/src.gif	binary data
1	www/build-icons/win32.gif	binary data
1	www/copyright-release.pdf	binary data
1	www/encode1.gif	binary data
1	www/encode2.gif	binary data
1	www/encode3.gif	binary data
1	www/encode4.gif	binary data
1	www/encode5.gif	binary data
1	www/encode6.gif	binary data
1	www/encode7.gif	binary data
1	www/encode8.gif	binary data
1	www/encode9.gif	binary data
1	www/fossil.gif	binary data
1	www/fossil2.gif	binary data
1	www/fossil3.gif	binary data
1	www/fossil_logo_small.gif	binary data
1	www/fossil_logo_small2.gif	binary data
1	www/fossil_logo_small3.gif	binary data
1	www/server/windows/cgi-bin-perm.png	binary data
1	www/server/windows/cgi-exec-perm.png	binary data
1	www/server/windows/cgi-install-iis.png	binary data
1	www/server/windows/cgi-script-map.png	binary data
1	www/xkcd-git.gif	binary data
1
test test-framework-diff FAILED!
RESULT: ok
test json-login-c-m FAILED!
RESULT: {
	"fossil":"6dc7e5283c48f55aa7df1d494b9eb9faf8a678517e2632df530a3f0cf445342d",
	"timestamp":1682019070,
	"command":"login",
	"procTimeUs":1263,
	"procTimeMs":1,
	"payload":{
		"authToken":"aff9d0fb42154440029b06939a338b5abc75bafe/2460055.3133117/anonymous",
		"name":"anonymous",
		"capabilities":"hz",
		"loginCookieName":"fossil-c21bf0292c69535f"
	}
}
test json-login-c-n FAILED!
RESULT: {
	"fossil":"6dc7e5283c48f55aa7df1d494b9eb9faf8a678517e2632df530a3f0cf445342d",
	"timestamp":1682019070,
	"command":"login",
	"procTimeUs":1263,
	"procTimeMs":1,
	"payload":{
		"authToken":"aff9d0fb42154440029b06939a338b5abc75bafe/2460055.3133117/anonymous",
		"name":"anonymous",
		"capabilities":"hz",
		"loginCookieName":"fossil-c21bf0292c69535f"
	}
}
test json-login-c-c FAILED!
RESULT: {
	"fossil":"6dc7e5283c48f55aa7df1d494b9eb9faf8a678517e2632df530a3f0cf445342d",
	"timestamp":1682019070,
	"command":"login",
	"procTimeUs":1263,
	"procTimeMs":1,
	"payload":{
		"authToken":"aff9d0fb42154440029b06939a338b5abc75bafe/2460055.3133117/anonymous",
		"name":"anonymous",
		"capabilities":"hz",
		"loginCookieName":"fossil-c21bf0292c69535f"
	}
}
test json-cap-POSTenv-name FAILED (knownBug)!
test json-env-RC-1103-code FAILED (knownBug)!
ERROR (1): directory /tmp/repo_38764/1682019069_10/json.test is not empty
use the -f (--force) option to override
or the -k (--keep) option to keep local files unchanged
ERROR (1): WARNING: 1 merge conflicts
test merge1-2.1 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge1-2.2 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge1-4.1 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge1-4.2 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge1-7.1 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge1-7.2 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-3 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-4 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-5 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-6 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-7 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-8 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-11 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-24 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-25 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-34 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-35 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-44 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-45 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-54 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-55 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-64 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-65 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-74 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-75 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-84 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-85 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-94 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-95 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-103 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge3-104 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge4-1000 FAILED!
RESULT: WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
ERROR (1): WARNING: 1 merge conflicts
test merge4-1002 FAILED!
RESULT: WARNING: 1 merge conflicts
Skipping Merge5 tests
test merge5-sqlite3-issue FAILED (knownBug)!
test test-framework-merge_renames FAILED!
RESULT: EXECUTABLE f1
EXECUTABLE f2
UNEXEC f3
UNEXEC f4
UPDATE f2
MERGE f3
RENAME f1 -> f1n
ADDED f0
 "fossil undo" is available to undo changes to the working checkout.
test test-framework-merge_renames_2 FAILED!
RESULT: EXECUTABLE f1
EXECUTABLE f2
UNEXEC f3
UNEXEC f4
UPDATE f2
MERGE f3
RENAME f1 -> f1n
ADDED f0
 "fossil undo" is available to undo changes to the working checkout.
ERROR (1): MERGE f2
***** 1 merge conflict in f2
DELETE f1
ADDED f3 (overwrites an unmanaged file), original copy backed up locally
 "fossil undo" is available to undo changes to the working checkout.
ignoring --integrate: mrg is not a leaf
WARNING: local edits lost for f1
WARNING: 2 merge conflicts
WARNING: 1 unmanaged files were overwritten
  Expected:
    WARNING: {1 merge conflicts}
    WARNING: {1 unmanaged files were overwritten}
    DELETE f1
    ADDED {f3 (overwrites an unmanaged file)}
    WARNING: {local edits lost for f1}
    WARNING: {no common ancestor for f2}
  Got:
    WARNING: {1 unmanaged files were overwritten}
    WARNING: {2 merge conflicts}
    DELETE f1
    MERGE f2
    ADDED {f3 (overwrites an unmanaged file), original copy backed up locally}
    WARNING: {local edits lost for f1}
test merge_warn-1 FAILED!
RESULT: MERGE f2
***** 1 merge conflict in f2
DELETE f1
ADDED f3 (overwrites an unmanaged file), original copy backed up locally
 "fossil undo" is available to undo changes to the working checkout.
ignoring --integrate: mrg is not a leaf
WARNING: local edits lost for f1
WARNING: 2 merge conflicts
WARNING: 1 unmanaged files were overwritten
test test-framework-mv-rm FAILED!
RESULT: MERGE f2
***** 1 merge conflict in f2
DELETE f1
ADDED f3 (overwrites an unmanaged file), original copy backed up locally
 "fossil undo" is available to undo changes to the working checkout.
ignoring --integrate: mrg is not a leaf
WARNING: local edits lost for f1
WARNING: 2 merge conflicts
WARNING: 1 unmanaged files were overwritten
test test-framework-revert FAILED!
RESULT: 2 a/.fslckout-journal
test test-framework-settings-repo FAILED!
RESULT: New_Version: 84f6602c3debc3072c4240dacb6d4cba9abb92a9a1284b9330f1918d70883d98
test settings-valid-local-auto-hyperlink-delay FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-local-auto-hyperlink-mouseover FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-local-chat-timeline-user FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-local-dont-commit FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-local-email-listid FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-local-large-file-size FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-local-self-pw-reset FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-global-auto-hyperlink-delay FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-global-auto-hyperlink-mouseover FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-global-chat-timeline-user FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-global-dont-commit FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-global-email-listid FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-global-large-file-size FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
test settings-valid-global-self-pw-reset FAILED!
RESULT: access-log          
admin-log           
allow-symlinks      
auto-captcha        
auto-hyperlink      
auto-hyperlink-delay
auto-hyperlink-mouseover
auto-shun           
autosync            
autosync-tries      
backoffice-disable  
backoffice-logfile  
backoffice-nodelay  
binary-glob         
case-sensitive      
chat-alert-sound    
chat-initial-history
chat-inline-images  
chat-keep-count     
chat-keep-days      
chat-poll-timeout   
chat-timeline-user  
clean-glob          
clearsign           
comment-format      
crlf-glob           
crnl-glob           
default-csp         
default-perms       
diff-binary         
diff-command        
dont-commit         
dont-push           
dotfiles            
editor              
email-admin         
email-listid        
email-renew-interval
email-self          
email-send-command  
email-send-db       
email-send-dir      
email-send-method   
email-send-relayhost
email-subname       
email-url           
empty-dirs          
encoding-glob       
exec-rel-paths      
fileedit-glob       
forbid-delta-manifests
gdiff-command       
gmerge-command      
hash-digits         
hooks               
http-port           
https-login         
ignore-glob         
keep-glob           
large-file-size     
localauth           
lock-timeout        
main-branch         
mainmenu            
manifest            
max-cache-entry     
max-loadavg         
max-upload          
mimetypes           
mtime-changes       
mv-rm-files         
pgp-command         
preferred-diff-type 
proxy               
redirect-to-https   
relative-paths      
repo-cksum          
repolist-skin       
safe-html           
self-pw-reset       
self-register       
sitemap-extra       
ssh-command         
ssl-ca-location     
ssl-identity        
tcl                 
tcl-setup           
tclsh               
th1-docs            
th1-hooks           
th1-setup           
th1-uri-regexp      
ticket-default-report
user-color-map      
uv-sync             
web-browser         
ERROR (1): cannot set 'manifest' globally
ERROR (1): no such setting: bad-setting
ERROR (1): no such setting: bad-setting
test test-framework-stash FAILED!
RESULT: no such setting: bad-setting
Symlinks are not enabled.
ERROR (1): /***** Subprocess 36246 exit(0) *****/
ERROR (1): 
<h1><b>command_hook timeline</b></h1>
unknown check-in or invalid date: custom3
ERROR (1): 
<h1><b>command_hook test4</b></h1>
/usr/home/patmaddox/oss/fossil-scm/fossil: unknown command: test4
/usr/home/patmaddox/oss/fossil-scm/fossil: use "help" for more information
ERROR (1): /***** Subprocess 43782 exit(0) *****/
ERROR (1): /***** Subprocess 44409 exit(0) *****/
test test-framework-th1-repo FAILED!
RESULT: 
<h1><b>command_hook settings command_notify settings</b></h1>
ERROR (1): 

------------------ BEGIN TRACE LOG ------------------
th1-init 0x0 => 0x0<br />

------------------- END TRACE LOG -------------------
/***** Subprocess 54956 exit(0) *****/
ERROR (1): 

------------------ BEGIN TRACE LOG ------------------
th1-init 0x0 => 0x0<br />
this is a trace message.
------------------- END TRACE LOG -------------------
/***** Subprocess 55847 exit(0) *****/
test th1-defHeader-2 FAILED!
RESULT: <html>
<head>
<meta charset="UTF-8">
<base href="$baseurl/$current_page" />
<meta http-equiv="Content-Security-Policy" content="$default_csp" />
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>$<project_name>: $<title></title>
<link rel="alternate" type="application/rss+xml" title="RSS Feed"  href="$home/timeline.rss" />
<link rel="stylesheet" href="$stylesheet_url" type="text/css" />
</head>
<body class="$current_feature rpage-$requested_page cpage-$canonical_page">

test th1-info-commands-1 FAILED!
RESULT: linecount htmlize date nonce stime enable_output uplevel dir http expr glob_match builtin_request_js verifyLogin utime styleFooter encode64 catch if tclReady searchable reinitialize combobox lindex tclIsSafe query html anoncap randhex llength for set break regexp markdown defHeader copybtn styleHeader puts foreach return insertCsrf checkout decorate redirect artifact trace capexpr wiki proc tclInvoke hascap globalState continue cgiHeaderLine getParameter hasfeature lappend setting lsearch verifyCsrf breakpoint styleScript upvar render enable_htmlify repository string captureTh1 unset setParameter list error info rename submenu tclExpr array anycap unversioned tclEval httpize tclMakeSafe
test test-framework-unversioned FAILED!
RESULT: 10
***** Final results: 63 errors out of 40341 tests
***** Considered failures: pre-commit-warnings-fossil-1 test-framework-diff json-login-c-m json-login-c-n json-login-c-c merge1-2.1 merge1-2.2 merge1-4.1 merge1-4.2 merge1-7.1 merge1-7.2 merge3-3 merge3-4 merge3-5 merge3-6 merge3-7 merge3-8 merge3-11 merge3-24 merge3-25 merge3-34 merge3-35 merge3-44 merge3-45 merge3-54 merge3-55 merge3-64 merge3-65 merge3-74 merge3-75 merge3-84 merge3-85 merge3-94 merge3-95 merge3-103 merge3-104 merge4-1000 merge4-1002 test-framework-merge_renames test-framework-merge_renames_2 merge_warn-1 test-framework-mv-rm test-framework-revert test-framework-settings-repo settings-valid-local-auto-hyperlink-delay settings-valid-local-auto-hyperlink-mouseover settings-valid-local-chat-timeline-user settings-valid-local-dont-commit settings-valid-local-email-listid settings-valid-local-large-file-size settings-valid-local-self-pw-reset settings-valid-global-auto-hyperlink-delay settings-valid-global-auto-hyperlink-mouseover settings-valid-global-chat-timeline-user settings-valid-global-dont-commit settings-valid-global-email-listid settings-valid-global-large-file-size settings-valid-global-self-pw-reset test-framework-stash test-framework-th1-repo th1-defHeader-2 th1-info-commands-1 test-framework-unversioned
***** Ignored results: 3 ignored errors out of 40341 tests
***** Ignored failures: json-cap-POSTenv-name json-env-RC-1103-code merge5-sqlite3-issue
patmaddox@beastie:~/oss/fossil-scm $

Added ports/Justfile.











































































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
# Poudriere / ports-tree management. The ports git clone lives under
# ports_dir; each branch gets its own worktree directory beneath it.
ports_dir := "freebsd-ports.git"
ports_main := ports_dir + "/main"

# list available recipes
help:
  @just -l

# build ports in the default 132amd64 jail
build *flags='':
  poudriere bulk -j 132amd64 {{flags}}

# replicate built packages to the NAS (zfs replication via syncoid)
@pkg-push:
  syncoid --no-rollback --no-privilege-elevation --quiet zroot/poudriere/data/packages bot-sanoid@nas:zdata/crypt/pkg

# build the prod package list from the main ports tree
build-main:
  poudriere bulk -p main -j 132amd64 -f prod -b latest

# build the prod list with the maintain overlay and patmaddox jail suffix
build-patmaddox:
  poudriere bulk -p main -j 132amd64 -f prod -b latest -O maintain -z patmaddox

build-prod *flags='': # build prod ports, fetching pre-built packages
  poudriere bulk -j 132amd64 -f prod -b latest {{flags}}

# clean build (-C) of the dev package list
build-dev:
  poudriere bulk -j 132amd64 -C -f dev

# update git remotes
update-remotes:
  git -C {{ports_main}} remote update

# remove stale packages for the prod list
clean:
  poudriere pkgclean -j 132amd64 -f prod

# list locally-defined ports (category/port/Makefile)
list-local:
  @cd tree && ls -1 */*/Makefile

# toggle the stock FreeBSD pkg repo (BSD sed in-place edit)
freebsd-enable:
  @sed -I '' -e 's/enabled: no$/enabled: yes/' /usr/local/etc/pkg/repos/FreeBSD.conf

freebsd-disable:
  @sed -I '' -e 's/enabled: yes$/enabled: no/' /usr/local/etc/pkg/repos/FreeBSD.conf

# create a sparse-checkout worktree for one port (see lib/make-worktree.sh)
port name: _ports-git
  ./lib/make-worktree.sh {{name}}

# ensure the ports clone exists (idempotent; see lib/checkout-ports.sh)
_ports-git:
  @./lib/checkout-ports.sh

# list available branches
branches: _ports-git
  git -C {{ports_main}} branch

# run a git command in the given branch
git branch *args:
  git -C {{ports_dir}}/{{branch}} {{args}}

# NOTE(review): hardcodes freebsd-ports.git instead of {{ports_dir}} — consider unifying
log branch *args: _ports-git
  cd freebsd-ports.git/{{branch}} && git log {{args}}

# create a new worktree
branch b:
  #!/bin/sh
  # try, in order: existing local branch, remote branch on patmaddox, brand-new branch
  if git -C {{ports_main}} branch --format '%(refname:short)' | grep '^{{b}}$' -; then
    git -C {{ports_main}} worktree add ../{{b}} {{b}}
    exit 0
  fi

  git -C {{ports_main}} remote update patmaddox
  if git -C {{ports_main}} ls-remote --exit-code --heads patmaddox {{b}} > /dev/null; then
    git -C {{ports_main}} worktree add ../{{b}} patmaddox/{{b}}
    exit 0
  fi

  git -C {{ports_main}} worktree add -b {{b}} ../{{b}}

# remove a branch's worktree (if present) and delete the branch
delete-branch b:
  if [ -d {{ports_dir}}/{{b}} ]; then git -C {{ports_main}} worktree remove ../{{b}}; fi
  git -C {{ports_main}} branch -d {{b}}

# push a branch from its own worktree to the patmaddox remote
push branch:
  git -C {{ports_dir}}/{{branch}} push patmaddox {{branch}}

# run `git worktree` commands in main ports tree
worktree *args:
  git -C {{ports_main}} worktree {{args}}

# show short git status for every branch worktree; list branches with no worktree
status:
  #!/bin/sh
  missing_trees=""
  for b in $(git -C {{ports_main}} branch --format '%(refname:short)'); do
    if [ -d {{ports_dir}}/$b ]; then
      git -C freebsd-ports.git/$b status --short --branch
    else
      # append "- <branch>\n"; the previous code prepended "- " to the whole
      # accumulator, which garbled output when more than one branch was missing
      missing_trees="$missing_trees- $b\n"
    fi
  done
  if [ -n "$missing_trees" ]; then
    echo "no worktrees for the following branches:"
    # %b expands the embedded \n escapes without using data as a format string
    printf '%b' "$missing_trees"
  fi

# fast-forward the main worktree
pull: _ports-git
  @cd freebsd-ports.git/main && git pull --ff-only

# atomically push branches (default: patmaddox-ports + the merge-branches file)
push-n *branches: _ports-git
  #!/bin/sh
  if [ -z "{{branches}}" ]; then
    branches="patmaddox-ports $(cat merge-branches)"
  else
    branches="{{branches}}"
  fi
  # only push branches that actually have a worktree
  merge_branches=""
  for b in $branches; do
    if [ -d freebsd-ports.git/$b ]; then
      merge_branches="$merge_branches $b"
    fi
  done
  cd freebsd-ports.git/main
  git push --atomic patmaddox $merge_branches

# merge every branch listed in merge-branches into patmaddox-ports
merge: _myports
  #!/bin/sh
  branches=$(cat merge-branches)
  # only merge branches that have a worktree checked out
  merge_branches=""
  for b in $branches; do
    if [ -d freebsd-ports.git/$b ]; then
      merge_branches="$merge_branches $b"
    fi
  done
  cd freebsd-ports.git/patmaddox-ports
  git merge --no-ff --no-edit $merge_branches

# ensure the patmaddox-ports worktree exists
_myports: _ports-git
  #!/bin/sh
  if [ ! -d freebsd-ports.git/patmaddox-ports ]; then
    cd freebsd-ports.git/main
    git worktree add --guess-remote ../patmaddox-ports
  fi

# throwaway merge of the given branches on top of main, in the scratch worktree
scratch +branches: _scratch
  #!/bin/sh
  cd freebsd-ports.git/scratch
  git reset --hard main
  git merge --no-ff --no-edit {{branches}}

# ensure the scratch worktree exists
_scratch: _ports-git
  #!/bin/sh
  if [ ! -d freebsd-ports.git/scratch ]; then
    cd freebsd-ports.git/main
    git worktree add ../scratch
  fi

# upgrade packages not currently from {{repo}} to the versions in {{repo}}
upgrade repo *flags:
  pkg upgrade -r {{repo}} {{flags}} $(pkg query '%n %R' | grep -v '{{repo}}$' | awk 'NF{NF--};1' | tr '\n' ' ')

# list manually-installed (non-automatic) packages by origin
@list-leaves:
  pkg query -e '%a = 0' %o

# merge the current leaf packages into the prod package list (dedup + sort)
@merge-leaves:
  pkg query -e '%a = 0' %o > all-leaves
  cat prod >> all-leaves
  sort -u all-leaves > prod
  rm all-leaves

# add a poudriere ports tree for a given branch
add-ports-tree name branch:
  poudriere ports -c -p {{name}} -m null -M $(pwd)/{{ports_dir}}/{{branch}}

Added ports/Makefile.











































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
.PHONY: main ports

# Orchestrates per-port maintenance branches. Each py-* port is a git worktree
# under ${PORTS_MAINTAIN}; its rule rebases the branch onto its dependency
# branch(es) and touches the worktree directory, so make's timestamp
# comparison against ${PORTS_MAIN} drives downstream rebases.
PORTS_DIR= freebsd-ports.git
PORTS_MAIN= ${PORTS_DIR}/main
PORTS_MAINTAIN= ${PORTS_DIR}/maintain

PORTS=	py-hologram \
	py-snowflake-connector-python \
	py-dbt-semantic-interfaces \
	py-minimal-snowplow-tracker \
	py-mashumaro \
	py-tree-sitter \
	py-schemachange \
	py-snowddl \
	py-dbt-extractor \
	py-dbt-core \
	py-dbt-snowflake \
	py-dbt-duckdb

help:
	@echo targets:
	@echo "  main"
	@echo "  main-f # touch main to force downstream rebuilds"
	@echo "  ports"

# refresh the main worktree (delegates to the Justfile)
main:
	just update-main

# refresh main and bump its timestamp to force every port rule to re-run
main-f:
	just update-main
	touch ${PORTS_MAIN}

# rebase all maintained port branches
ports: ${PORTS}

# no deps
py-hologram: ${PORTS_MAINTAIN}/py-hologram
${PORTS_MAINTAIN}/py-hologram: ${PORTS_MAIN}
	git -C ${.TARGET} rebase --quiet main
	touch ${.TARGET}

py-snowflake-connector-python: ${PORTS_MAINTAIN}/py-snowflake-connector-python
${PORTS_MAINTAIN}/py-snowflake-connector-python: ${PORTS_MAIN}
	git -C ${.TARGET} rebase --quiet main
	touch ${.TARGET}

py-dbt-semantic-interfaces: ${PORTS_MAINTAIN}/py-dbt-semantic-interfaces
${PORTS_MAINTAIN}/py-dbt-semantic-interfaces: ${PORTS_MAIN}
	git -C ${.TARGET} rebase --quiet main
	touch ${.TARGET}

py-minimal-snowplow-tracker: ${PORTS_MAINTAIN}/py-minimal-snowplow-tracker
${PORTS_MAINTAIN}/py-minimal-snowplow-tracker: ${PORTS_MAIN}
	git -C ${.TARGET} rebase --quiet main
	touch ${.TARGET}

py-mashumaro: ${PORTS_MAINTAIN}/py-mashumaro
${PORTS_MAINTAIN}/py-mashumaro: ${PORTS_MAIN}
	git -C ${.TARGET} rebase --quiet main
	touch ${.TARGET}

py-tree-sitter: ${PORTS_MAINTAIN}/py-tree-sitter
${PORTS_MAINTAIN}/py-tree-sitter: ${PORTS_MAIN}
	git -C ${.TARGET} rebase --quiet main
	touch ${.TARGET}

# snowflake
py-schemachange: ${PORTS_MAINTAIN}/py-schemachange
${PORTS_MAINTAIN}/py-schemachange: ${PORTS_MAINTAIN}/py-snowflake-connector-python
	git -C ${.TARGET} rebase --quiet maintain/py-snowflake-connector-python
	touch ${.TARGET}

py-snowddl: ${PORTS_MAINTAIN}/py-snowddl
${PORTS_MAINTAIN}/py-snowddl: ${PORTS_MAINTAIN}/py-snowflake-connector-python
	git -C ${.TARGET} rebase --quiet maintain/py-snowflake-connector-python
	touch ${.TARGET}

# dbt
py-dbt-extractor: ${PORTS_MAINTAIN}/py-dbt-extractor
${PORTS_MAINTAIN}/py-dbt-extractor: ${PORTS_MAINTAIN}/py-tree-sitter
	git -C ${.TARGET} rebase --quiet maintain/py-tree-sitter
	touch ${.TARGET}

# py-dbt-core rebases onto each of its five dependency branches in turn
py-dbt-core: ${PORTS_MAINTAIN}/py-dbt-core
${PORTS_MAINTAIN}/py-dbt-core: ${PORTS_MAINTAIN}/py-mashumaro ${PORTS_MAINTAIN}/py-dbt-extractor ${PORTS_MAINTAIN}/py-hologram ${PORTS_MAINTAIN}/py-minimal-snowplow-tracker ${PORTS_MAINTAIN}/py-dbt-semantic-interfaces
	git -C ${.TARGET} rebase --quiet maintain/py-mashumaro
	git -C ${.TARGET} rebase --quiet maintain/py-dbt-extractor
	git -C ${.TARGET} rebase --quiet maintain/py-hologram
	git -C ${.TARGET} rebase --quiet maintain/py-minimal-snowplow-tracker
	git -C ${.TARGET} rebase --quiet maintain/py-dbt-semantic-interfaces
	touch ${.TARGET}

py-dbt-snowflake: ${PORTS_MAINTAIN}/py-dbt-snowflake
${PORTS_MAINTAIN}/py-dbt-snowflake: ${PORTS_MAINTAIN}/py-snowflake-connector-python ${PORTS_MAINTAIN}/py-dbt-core
	git -C ${.TARGET} rebase --quiet maintain/py-snowflake-connector-python
	git -C ${.TARGET} rebase --quiet maintain/py-dbt-core
	touch ${.TARGET}

py-dbt-duckdb: ${PORTS_MAINTAIN}/py-dbt-duckdb
${PORTS_MAINTAIN}/py-dbt-duckdb: ${PORTS_MAINTAIN}/py-dbt-core
	git -C ${.TARGET} rebase --quiet maintain/py-dbt-core
	touch ${.TARGET}

Added ports/dev.







>
>
>
1
2
3
#sysutils/jectl
#patmaddox/elixir-bundled-app
patmaddox/org

Added ports/examples/pkg-repo-poudriere.conf.



























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
# /usr/local/etc/pkg/repos/poudriere.conf
# pkg(8) repository configuration; when a package exists in several repos,
# the highest priority wins.

# remote repo of locally-maintained packages
p7x {
  url: "https://pkg.p7x.sh/132amd64-default/",
  enabled: yes,
  priority: 20
}

# packages built by the local poudriere instance
local {
  url: "file:///usr/local/poudriere/data/packages/132amd64-default",
  enabled: yes,
  priority: 10
}

# throwaway builds from the scratch jail
scratch {
  url: "file:///usr/local/poudriere/data/packages/132amd64-scratch",
  enabled: yes,
  priority: 0
}

# stock repo disabled; the freebsd entry below is the toggled replacement
# (see freebsd-enable/freebsd-disable in the Justfile)
FreeBSD: { enabled: no }

freebsd: {
  url: "pkg+http://pkg.FreeBSD.org/${ABI}/latest",
  mirror_type: "srv",
  signature_type: "fingerprints",
  fingerprints: "/usr/share/keys/pkg",
  priority: 0,
  enabled: yes
}

Added ports/lib/checkout-ports.sh.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
#!/bin/sh
# Bootstrap the FreeBSD ports clone under freebsd-ports.git/, with "main"
# tracking the upstream freebsd remote. Idempotent: a no-op when
# freebsd-ports.git/main already exists.
set -eu

ports=freebsd-ports.git
main=$ports/main

if [ ! -d "$main" ]; then
    mkdir -p "$main"
    cd "$main"
    git init
    git remote add freebsd https://github.com/freebsd/freebsd-ports.git
    git remote add patmaddox git@github.com:patmaddox/freebsd-ports.git
    git remote update
    git checkout --track freebsd/main
fi

Added ports/lib/make-worktree.sh.

































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
#!/bin/sh
# Create a sparse-checkout git worktree for a single port.
# Usage: make-worktree.sh <category>/<port>
# The worktree directory name replaces '/' with '-'
# (e.g. databases/py-dbt-core -> databases-py-dbt-core).
set -eu

port=${1:?usage: make-worktree.sh category/port}
worktree=$(echo "$port" | tr / -)

if [ ! -d "freebsd-ports.git/main/$port" ]; then
    echo "no such port: $port" 1>&2
    exit 1
fi

if [ ! -d "freebsd-ports.git/$worktree" ]; then
    cd freebsd-ports.git/main
    git worktree add --no-checkout "../$worktree"
    cd "../$worktree"
    git sparse-checkout set "$port"
    # materialize only the sparse paths in the new worktree
    git reset --hard HEAD
fi

Added ports/maintain/Justfile.

























































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
# Port-maintenance tasks; operates on worktrees under ports_dir.
ports_dir := "freebsd-ports.git"
ports_main := ports_dir + "/main"

# absolute ports tree path, exported for portlint and make(1)
export PORTSDIR := "/home/patmaddox/ports/freebsd-ports.git/main"

# list available recipes
help:
  just -l

# update main
update-main:
  #!/bin/sh
  set -e
  git -C {{ports_main}} remote update freebsd
  git -C {{ports_main}} reset --hard freebsd/main
  git -C {{ports_main}} clean -f

# update main ports tree based on latest pkg-status build
update-main-built:
  #!/bin/sh
  set -e
  # buildname of the most recent completed 132amd64-default build is the
  # ports-tree commit it was built from
  commit=$(fetch -q -o - 'https://pkg-status.freebsd.org/api/1/builds?type=package&jailname=132amd64' | fx 'x["builds"].find(b => b.mastername == "132amd64-default" && b.status == "done:").buildname')
  # quote $commit: unquoted it could word-split and break the test expression
  if [ -z "$commit" ]; then
    echo 'Could not find completed build. Still running? https://pkg-status.freebsd.org/' 1>&2
    exit 1
  fi
  local_commit=$(git -C {{ports_main}} rev-parse --short HEAD)
  if [ "$local_commit" != "$commit" ]; then
    git -C {{ports_main}} remote update freebsd && git -C {{ports_main}} reset --hard "$commit"
    touch {{ports_main}}
  fi

# [-i] (launch jail); [-t] (test port)
build *args:
  #!/bin/sh
  set -e
  # category/portname derived from the directory `just build` is invoked from
  category=$(realpath {{invocation_directory()}}/.. | xargs basename)
  portname=$(basename {{invocation_directory()}})
  poudriere bulk -z maintain_dev -j 132amd64 -p main -O maintain -b latest -C {{args}} ${category}/${portname}

# clean-build every maintained port found under ports/<category>/<port>
build-all:
  poudriere bulk -z maintain -j 132amd64 -p main -O maintain -b latest -C $(find ports -type d -depth 2 | sed -e 's|^ports/||')


# create a new git commit patch
[no-cd]
format-patch:
  #!/bin/sh
  set -e
  gitdir=$(realpath {{justfile_directory()}}/../freebsd-ports.git)
  maindir=$gitdir/main
  category=$(realpath {{invocation_directory()}}/.. | xargs basename)
  portname=$(basename {{invocation_directory()}})
  scratchdir=$gitdir/scratch-$portname
  messagefile={{invocation_directory()}}/git-message

  if [ ! -f $messagefile ]; then
    echo "error: write a commit message in git-message before formatting patch"
    exit 1
  fi

  if [ -d $scratchdir ]; then
    echo "error: $scratchdir exists. delete before running format-patch"
    exit 1
  fi

  # build a throwaway sparse worktree containing only this port, copy the
  # working files in (minus git-message), commit, and format the patch
  rm -f {{justfile_directory()}}/patches/$portname.patch
  git -C $maindir worktree add --detach --no-checkout $scratchdir main
  git -C $scratchdir sparse-checkout set $category/$portname
  git -C $scratchdir reset --hard main
  rm -rf $scratchdir/$category/$portname
  mkdir $scratchdir/$category/$portname
  cp -Rp {{invocation_directory()}}/* $scratchdir/$category/$portname
  rm -f $scratchdir/$category/$portname/git-message
  git -C $scratchdir add $category/$portname
  git -C $scratchdir commit -F $messagefile
  git -C $scratchdir format-patch -o {{invocation_directory()}} main
  mv {{invocation_directory()}}/0001-*.patch {{justfile_directory()}}/patches/$portname.patch
  git -C $maindir worktree remove scratch-$portname

# create a new, empty git-message file
[no-cd]
git-message:
  #!/bin/sh
  set -e
  category=$(realpath {{invocation_directory()}}/.. | xargs basename)
  portname=$(basename {{invocation_directory()}})

  echo "$category/$portname: " > {{invocation_directory()}}/git-message

# fetch committed port definition files from git
[no-cd]
fetch-port *files="Makefile distinfo pkg-descr":
  #!/bin/sh
  set -e
  category=$(realpath {{invocation_directory()}}/.. | xargs basename)
  portname=$(basename {{invocation_directory()}})
  for f in {{files}}; do fetch -q https://cgit.freebsd.org/ports/plain/$category/$portname/$f; done

# fetch distfiles and patches to distfiles/
[no-cd]
fetch-distfiles:
  DISTDIR={{justfile_directory()}}/distfiles make fetch

# update distinfo checksum
[no-cd]
makesum:
  DISTDIR={{justfile_directory()}}/distfiles make makesum

# list pending patches
@patches:
  find -s . -name '*.patch' | sed -e 's|^./||'

# list outdated ports, according to portscout
status:
  #!/bin/sh
  set -e
  outdated=""
  pending=""
  wip=""

  # portscout RSS -> "port newversion" lines, one per outdated port
  fetch -q -o - 'https://portscout.freebsd.org/rss/rss.cgi?m=pat@patmaddox.com' | grep '^<title>.*-&#x3E;' | sed -e 's/^<title>//' -e 's/: .* -&#x3E;//' -e 's|</title>||' | sort > outdated
  while read portline; do
    port=$(echo "$portline" | awk '{print $1}')
    vers=$(echo "$portline" | awk '{print $2}')
    # local Makefile already at the new version?
    if grep -q "PORTVERSION=.*$vers" {{justfile_directory()}}/$port/Makefile; then
      # patch present -> submitted upstream (pending); otherwise still WIP
      if ls {{justfile_directory()}}/$port/*.patch > /dev/null 2>&1; then
        pending="$pending$portline\n"
      else
        wip="$wip$portline\n"
      fi
    else
      outdated="$outdated$portline\n"
    fi
  done < outdated
  rm -f outdated

  if [ -n "$outdated" ]; then
    echo "Outdated:"
    printf "$outdated" | sed -e 's/^/  /'
  fi

  if [ -n "$wip" ]; then
    echo "WIP:"
    printf "$wip" | sed -e 's/^/  /'
  fi

  if [ -n "$pending" ]; then
    echo "Pending:"
    printf "$pending" | sed -e 's/^/  /'
  fi

# clean editor backups, then lint and format the port in the current directory
[no-cd]
lint:
  rm -f *~
  GIT_DIR=$PORTSDIR/.git PL_GIT_IGNORE='.*' portlint -A
  portfmt -i Makefile

Added ports/maintain/README.md.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
# maintaining ports

`just status` to list outdated ports

1. `just makesum`
2. review dependencies
3. `doas just build -t`

If necessary:

1. `doas just build -i`
2. `cd /overlays/maintain/<path-to-port>`

Added ports/maintain/ports/databases/py-dbt-core/Makefile.





















































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
# FreeBSD port of dbt-core (data build tool), installed from PyPI sdist.
PORTNAME=	dbt-core
PORTVERSION=	1.7.3
PORTREVISION=	1
CATEGORIES=	databases python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	pat@patmaddox.com
COMMENT=	With dbt, build analytics the way engineers build applications
WWW=		https://github.com/dbt-labs/dbt-core

LICENSE=	APACHE20

RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}agate>=1.7,<2:textproc/py-agate@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}Jinja2>=3.1.2,<4:devel/py-Jinja2@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}mashumaro>=3.9,<4:devel/py-mashumaro@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}msgpack>0.5.6:devel/py-msgpack@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}logbook>0:devel/py-logbook@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}click>=8.0.2,<9:devel/py-click@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}networkx>=2.3:math/py-networkx@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}colorama>=0.3.9,<0.5:devel/py-colorama@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}pathspec>=0.9,<0.13:devel/py-pathspec@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}isodate>=0.6,<0.7:devel/py-isodate@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}sqlparse>=0.2.3,<0.5:databases/py-sqlparse@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}dbt-extractor>=0.5:devel/py-dbt-extractor@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}minimal-snowplow-tracker>=0.0.2,<0.1:devel/py-minimal-snowplow-tracker@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}dbt-semantic-interfaces>=0.4.0,<0.5:databases/py-dbt-semantic-interfaces@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}jsonschema>=3.0:devel/py-jsonschema@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}packaging>20.9:devel/py-packaging@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}protobuf>=4.0.0:devel/py-protobuf@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}pytz>=2015.7:devel/py-pytz@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}yaml>=6.0:devel/py-yaml@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}typing-extensions>=3.7.4:devel/py-typing-extensions@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}cffi>=1.9,<2:devel/py-cffi@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}idna>=2.5,<4:dns/py-idna@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}requests<3:www/py-requests@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}urllib3>=1.0:net/py-urllib3@${PY_FLAVOR}

USES=		python
USE_PYTHON=	autoplist concurrent distutils

.include <bsd.port.mk>

Added ports/maintain/ports/databases/py-dbt-core/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1701374624
SHA256 (dbt-core-1.7.3.tar.gz) = a8f06203cd003ebcdda2af953f60c2b44fc0c69bd6cba79338225daedb1f41e1
SIZE (dbt-core-1.7.3.tar.gz) = 913570

Added ports/maintain/ports/databases/py-dbt-core/pkg-descr.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
dbt enables data analysts and engineers to transform their data using the same
practices that software engineers use to build applications.

Analysts using dbt can transform their data by simply writing select statements,
while dbt handles turning these statements into tables and views in a data
warehouse.

These select statements, or "models", form a dbt project. Models frequently
build on top of one another - dbt makes it easy to manage relationships between
models, and visualize these relationships, as well as assure the quality of your
transformations through testing.

Added ports/maintain/ports/databases/py-dbt-duckdb/Makefile.









































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# FreeBSD port of dbt-duckdb, the DuckDB adapter for dbt.
PORTNAME=	dbt-duckdb
PORTVERSION=	1.7.0
CATEGORIES=	databases python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	pat@patmaddox.com
COMMENT=	DuckDB adapter plugin for dbt (data build tool)
WWW=		https://github.com/jwills/dbt-duckdb

LICENSE=	APACHE20

RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}dbt-core>=1.7.0,<2:databases/py-dbt-core@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}duckdb>=0.7.0:databases/py-duckdb@${PY_FLAVOR}

USES=		python
USE_PYTHON=	autoplist concurrent distutils

# py-dbt-duckdb attempts to install __init__.py and the generated bytecode into
# these directories which conflicts with those installed by py-dbt-core. We need to
# delete these files and their entries in the generated plist post-install.
COMMON_DIRS=	dbt \
		dbt/adapters \
		dbt/include

# strip the conflicting entries from the auto-generated plist, then remove
# the staged files themselves
post-stage:
.for dir in ${COMMON_DIRS:S|/|\/|g}
	@${REINPLACE_CMD} -e '/.*\/${dir}\/__init__.py/d' \
		-e '/.*\/${dir}\/__pycache__.*/d' \
		${WRKDIR}/.PLIST.pymodtmp
.endfor
.for dir in ${COMMON_DIRS}
	@(cd ${STAGEDIR}${PYTHON_SITELIBDIR}/${dir} && ${RM} -r __pycache__ __init__.py)
.endfor

.include <bsd.port.mk>

Added ports/maintain/ports/databases/py-dbt-duckdb/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1701384521
SHA256 (dbt-duckdb-1.7.0.tar.gz) = e2479f5fdd5177eea37c081d8ea126d80ec51d95e69ea31132ade396538bf0a0
SIZE (dbt-duckdb-1.7.0.tar.gz) = 49217

Added ports/maintain/ports/databases/py-dbt-duckdb/pkg-descr.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
dbt-duckdb

DuckDB is an embedded database, similar to SQLite, but designed for OLAP-style
analytics. It is crazy fast and allows you to read and write data stored in CSV,
JSON, and Parquet files directly, without requiring you to load them into the
database first.

dbt is the best way to manage a collection of data transformations written in
SQL or Python for analytics and data science. dbt-duckdb is the project that
ties DuckDB and dbt together, allowing you to create a Modern Data Stack In A
Box or a simple and powerful data lakehouse with Python.

Added ports/maintain/ports/databases/py-dbt-semantic-interfaces/Makefile.





























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
# FreeBSD port of dbt-semantic-interfaces (shared by dbt-core and MetricFlow).
PORTNAME=	dbt-semantic-interfaces
PORTVERSION=	0.4.1
CATEGORIES=	databases python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
# PyPI sdist uses underscores in the archive name
DISTNAME=	dbt_semantic_interfaces-${PORTVERSION}

MAINTAINER=	pat@patmaddox.com
COMMENT=	Shared semantic layer definitions that dbt-core and MetricFlow use
WWW=		https://github.com/dbt-labs/dbt-semantic-interfaces

LICENSE=	APACHE20

BUILD_DEPENDS=	hatch:devel/py-hatch@${PY_FLAVOR} \
		hatchling:devel/py-hatchling@${PY_FLAVOR}

RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}pydantic>=1.10,<2:devel/py-pydantic@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}jsonschema>=4,<5:devel/py-jsonschema@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}yaml>=6,<7:devel/py-yaml@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}more-itertools>=8,<11:devel/py-more-itertools@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}Jinja2>=3,<4:devel/py-Jinja2@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}click>=7,<9:devel/py-click@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}dateutil>=2,<3:devel/py-dateutil@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}importlib-metadata>=6:devel/py-importlib-metadata@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}typing-extensions>=4.4,<5:devel/py-typing-extensions@${PY_FLAVOR}

USES=		python
USE_PYTHON=	autoplist concurrent pep517

.include <bsd.port.mk>

Added ports/maintain/ports/databases/py-dbt-semantic-interfaces/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1701246479
SHA256 (dbt_semantic_interfaces-0.4.1.tar.gz) = d79cb4b5e5103099874530735a117db61f7dcbb1a774f9ce65068c24cb7e874e
SIZE (dbt_semantic_interfaces-0.4.1.tar.gz) = 72398

Added ports/maintain/ports/databases/py-dbt-semantic-interfaces/pkg-descr.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
dbt-semantic-interfaces contains the shared semantic classes, default
validation, and tests designed to be used by both the dbt-core and MetricFlow
projects. By centralizing these shared resources, we aim to maintain consistency
and reduce code duplication across both projects.

Features:

- Protocols for shared semantic classes: Define the interfaces and common
  attributes that must be implemented by the objects in both projects.
- Validation: Ensure that the objects comply with the expected structure and
  constraints.
- Tests: Ensure that the objects' behavior is consistent and correct across both
  projects.

Added ports/maintain/ports/databases/py-dbt-snowflake/Makefile.













































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
PORTNAME=	dbt-snowflake
PORTVERSION=	1.7.0
CATEGORIES=	databases python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	pat@patmaddox.com
COMMENT=	Snowflake adapter plugin for dbt
WWW=		https://github.com/dbt-labs/dbt-snowflake

LICENSE=	APACHE20

RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}agate>0:textproc/py-agate@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}dbt-core>=1.7.3,<2:databases/py-dbt-core@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}snowflake-connector-python>=3.0,<4:databases/py-snowflake-connector-python@${PY_FLAVOR} \
	        ${PYTHON_PKGNAMEPREFIX}keyring>=16.1.1:security/py-keyring@${PY_FLAVOR}

USES=		python
USE_PYTHON=	autoplist concurrent distutils

# py-dbt-snowflake attempts to install __init__.py and the generated bytecode into
# these directories which conflicts with those installed by py-dbt-core. We need to
# delete these files and their entries in the generated plist post-install.
COMMON_DIRS=	dbt \
		dbt/adapters \
		dbt/include

post-stage:
.for dir in ${COMMON_DIRS:S|/|\/|g}
	@${REINPLACE_CMD} -e '/.*\/${dir}\/__init__.py/d' \
		-e '/.*\/${dir}\/__pycache__.*/d' \
		${WRKDIR}/.PLIST.pymodtmp
.endfor
.for dir in ${COMMON_DIRS}
	@(cd ${STAGEDIR}${PYTHON_SITELIBDIR}/${dir} && ${RM} -r __pycache__ __init__.py)
.endfor

.include <bsd.port.mk>

Added ports/maintain/ports/databases/py-dbt-snowflake/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1701384077
SHA256 (dbt-snowflake-1.7.0.tar.gz) = 959943cdb0bcedc67203f0205a9ae21fba13f4c95ddf7089848a77b8c8a8d5f1
SIZE (dbt-snowflake-1.7.0.tar.gz) = 33622

Added ports/maintain/ports/databases/py-dbt-snowflake/pkg-descr.





>
>
1
2
The dbt-snowflake package contains all of the code enabling dbt to work with
Snowflake.

Added ports/maintain/ports/databases/py-duckdb/Makefile.





























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
PORTNAME=	duckdb
DISTVERSION=	0.9.2
CATEGORIES=	databases python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	yuri@FreeBSD.org
COMMENT=	In-process SQL OLAP database management system
WWW=		https://duckdb.org/

LICENSE=	MIT

BUILD_DEPENDS=	${PYTHON_PKGNAMEPREFIX}setuptools_scm>0:devel/py-setuptools_scm@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}pybind11>=2.6.0:devel/py-pybind11@${PY_FLAVOR} \
		${PYNUMPY}
RUN_DEPENDS=	${PYNUMPY} \
		${PYTHON_PKGNAMEPREFIX}pandas>0:math/py-pandas@${PY_FLAVOR}
TEST_DEPENDS=	${PYTHON_PKGNAMEPREFIX}mypy>0:devel/py-mypy@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}google-cloud-storage>0:www/py-google-cloud-storage@${PY_FLAVOR}

USES=		python
USE_PYTHON=	distutils autoplist pytest

TEST_WRKSRC=	${WRKDIR} # tests fail when run from the build directory due to a name collision with duckdb directory
TEST_ENV=	${MAKE_ENV} PYTHONPATH=${STAGEDIR}${PYTHONPREFIX_SITELIBDIR} # 2 tests fail, see https://github.com/duckdb/duckdb/issues/5642

post-install:
	@${STRIP_CMD} ${STAGEDIR}${PYTHON_SITELIBDIR}/duckdb/duckdb.cpython-${PYTHON_SUFFIX}.so

.include <bsd.port.mk>

Added ports/maintain/ports/databases/py-duckdb/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1700026657
SHA256 (duckdb-0.9.2.tar.gz) = 3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447
SIZE (duckdb-0.9.2.tar.gz) = 10653450

Added ports/maintain/ports/databases/py-duckdb/files/patch-setup.py.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
- allow to pass a newer setuptools_scm

--- setup.py.orig	2023-11-14 09:44:27 UTC
+++ setup.py
@@ -349,7 +349,7 @@ setup(
     packages=packages,
     include_package_data=True,
     python_requires='>=3.7.0',
-    setup_requires=setup_requires + ["setuptools_scm<7.0.0", 'pybind11>=2.6.0'],
+    setup_requires=setup_requires + ["setuptools_scm", 'pybind11>=2.6.0'],
     use_scm_version=setuptools_scm_conf,
     tests_require=['google-cloud-storage', 'mypy', 'pytest'],
     classifiers=[

Added ports/maintain/ports/databases/py-duckdb/pkg-descr.











>
>
>
>
>
1
2
3
4
5
DuckDB is a high-performance analytical database system. It is
designed to be fast, reliable and easy to use. DuckDB provides a rich
SQL dialect, with support far beyond basic SQL. DuckDB supports
arbitrary and nested correlated subqueries, window functions,
collations, complex types (arrays, structs), and more.

Added ports/maintain/ports/databases/py-schemachange/Makefile.



































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
PORTNAME=	schemachange
PORTVERSION=	3.6.1
CATEGORIES=	databases python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	pat@patmaddox.com
COMMENT=	Database Change Management tool for Snowflake
WWW=		https://github.com/Snowflake-Labs/schemachange

LICENSE=	APACHE20

BUILD_DEPENDS=	${PY_DEPENDS} \
		${PYTHON_PKGNAMEPREFIX}setuptools>=40.9.0:devel/py-setuptools@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}wheel>0:devel/py-wheel@${PY_FLAVOR}

RUN_DEPENDS=	${PY_DEPENDS}

TEST_DEPENDS=	${PY_DEPENDS} \
		${PYTHON_PKGNAMEPREFIX}pytest>0:devel/py-pytest@${PY_FLAVOR}

USES=		python
USE_PYTHON=	autoplist concurrent pep517

PY_DEPENDS=	${PYTHON_PKGNAMEPREFIX}Jinja2>=3.0,<4:devel/py-Jinja2@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}pandas>=1.3:math/py-pandas@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}yaml>=6.0,<7:devel/py-yaml@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}snowflake-connector-python>=2.8,<4:databases/py-snowflake-connector-python@${PY_FLAVOR}

do-test:
	@(cd ${TEST_WRKSRC} && ${SETENV} ${TEST_ENV} ${PYTHON_CMD} -m pytest)

.include <bsd.port.mk>

Added ports/maintain/ports/databases/py-schemachange/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1701373452
SHA256 (schemachange-3.6.1.tar.gz) = 2609f4fa654a4fe71e5057200e5c7687d12c931148fe8116eb76b93c5ff86f57
SIZE (schemachange-3.6.1.tar.gz) = 49139

Added ports/maintain/ports/databases/py-schemachange/pkg-descr.













>
>
>
>
>
>
1
2
3
4
5
6
schemachange is a simple python based tool to manage all of your Snowflake
objects. It follows an Imperative-style approach to Database Change Management
(DCM) and was inspired by the Flyway database migration tool. When combined with
a version control system and a CI/CD tool, database changes can be approved and
deployed through a pipeline using modern software delivery practices. As such
schemachange plays a critical role in enabling Database (or Data) DevOps.

Added ports/maintain/ports/databases/py-snowddl/Makefile.





















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
PORTNAME=	snowddl
PORTVERSION=	0.22.0
CATEGORIES=	databases python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	pat@patmaddox.com
COMMENT=	Object management automation tool for Snowflake
WWW=		https://docs.snowddl.com/

LICENSE=	APACHE20

BUILD_DEPENDS=	wheel-${PYTHON_VER}:devel/py-wheel@${PY_FLAVOR} \
		${PY_DEPENDS}

RUN_DEPENDS=	${PY_DEPENDS}

USES=		python
USE_PYTHON=	autoplist concurrent pep517

PY_DEPENDS=	${PYTHON_PKGNAMEPREFIX}jsonschema>=4.18,<5:devel/py-jsonschema@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}pydantic2>=2.4,<3:devel/py-pydantic2@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}yaml>=6.0,<7:devel/py-yaml@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}snowflake-connector-python>=3.0,<4:databases/py-snowflake-connector-python@${PY_FLAVOR}

.include <bsd.port.mk>

Added ports/maintain/ports/databases/py-snowddl/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1701373050
SHA256 (snowddl-0.22.0.tar.gz) = 05910499388fbf36983fa4d44c44060c4bfdcf1bce7172a61b6cc728c70ea65c
SIZE (snowddl-0.22.0.tar.gz) = 107057

Added ports/maintain/ports/databases/py-snowddl/pkg-descr.





























>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
SnowDDL is a declarative-style tool for object management automation in
Snowflake.

It is not intended to replace other tools entirely, but to provide an
alternative approach focused on practical data engineering challenges.

You may find SnowDDL useful if:

- complexity of object schema grows exponentially, and it becomes hard to manage
- your organization maintains multiple Snowflake accounts (dev, stage, prod)
- your organization has multiple developers sharing the same Snowflake account
  and suffering from conflicts
- it is necessary to generate some part of configuration dynamically using
  Python

Added ports/maintain/ports/databases/py-snowflake-connector-python/Makefile.



















































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
PORTNAME=	snowflake-connector-python
PORTVERSION=	3.5.0
CATEGORIES=	databases python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	pat@patmaddox.com
COMMENT=	Snowflake Connector for Python
WWW=		https://github.com/snowflakedb/snowflake-connector-python

LICENSE=	APACHE20

BUILD_DEPENDS=	${PY_DEPENDS} \
		${PYTHON_PKGNAMEPREFIX}cython3>0:lang/cython3@${PY_FLAVOR}

RUN_DEPENDS=	${PY_DEPENDS}

USES=		python
USE_PYTHON=	autoplist concurrent distutils

PY_DEPENDS=	${PYTHON_PKGNAMEPREFIX}asn1crypto>0.24.0,<2.0.0:devel/py-asn1crypto@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}cffi>=1.9,<2:devel/py-cffi@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}cryptography>=3.1.0:security/py-cryptography@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}oscrypto<2.0:security/py-oscrypto@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}openssl>=16.2.0:security/py-openssl@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}pyjwt<3.0:www/py-pyjwt@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}pytz>0:devel/py-pytz@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}requests<3.0:www/py-requests@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}importlib-metadata>0:devel/py-importlib-metadata@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}packaging>0:devel/py-packaging@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}charset-normalizer>=2,<4:textproc/py-charset-normalizer@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}idna>=2.5,<4.0:dns/py-idna@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}urllib3>=1.21.1:net/py-urllib3@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}certifi>=2017.4.17:security/py-certifi@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}typing-extensions>=4.3,<5.0:devel/py-typing-extensions@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}filelock>=3.5,<4.0:sysutils/py-filelock@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}sortedcontainers>=2.4.0:devel/py-sortedcontainers@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}platformdirs>=2.6.0,<4.0.0:devel/py-platformdirs@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}tomlkit>0:textproc/py-tomlkit@${PY_FLAVOR}

.include <bsd.port.mk>

Added ports/maintain/ports/databases/py-snowflake-connector-python/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1701367506
SHA256 (snowflake-connector-python-3.5.0.tar.gz) = 654e4a1f68a491544bd8f7c5ab02eb8531df67c5f4309d5253bd204044f8a1b3
SIZE (snowflake-connector-python-3.5.0.tar.gz) = 702274

Added ports/maintain/ports/databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.c.



























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
--- src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c.orig	2023-11-30 19:17:00 UTC
+++ src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c
@@ -21239,7 +21239,7 @@ static inline int ArrowIpcDecoderReadHeaderPrefix(stru
 
   if (header_body_size_bytes == 0) {
     ArrowErrorSet(error, "End of Arrow stream");
-    return ENODATA;
+    return ENOATTR;
   }
 
   return NANOARROW_OK;
@@ -22194,7 +22194,7 @@ static int ArrowIpcArrayStreamReaderNextHeader(
     // is one of the valid outcomes) but we set the error anyway in case it gets
     // propagated higher (e.g., if the stream is empty and there's no schema message)
     ArrowErrorSet(&private_data->error, "No data available on stream");
-    return ENODATA;
+    return ENOATTR;
   } else if (bytes_read != 8) {
     ArrowErrorSet(&private_data->error,
                   "Expected at least 8 bytes in remainder of stream");
@@ -22338,7 +22338,7 @@ static int ArrowIpcArrayStreamReaderGetNext(struct Arr
   // Read + decode the next header
   int result = ArrowIpcArrayStreamReaderNextHeader(
       private_data, NANOARROW_IPC_MESSAGE_TYPE_RECORD_BATCH);
-  if (result == ENODATA) {
+  if (result == ENOATTR) {
     // Stream is finished either because there is no input or because
     // end of stream bytes were read.
     out->release = NULL;

Added ports/maintain/ports/databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.h.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
--- src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.h.orig	2023-11-30 19:16:47 UTC
+++ src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.h
@@ -193,7 +193,7 @@ void ArrowIpcDecoderReset(struct ArrowIpcDecoder* deco
 /// The first 8 bytes of an Arrow IPC message are 0xFFFFFF followed by the size
 /// of the header as a little-endian 32-bit integer. ArrowIpcDecoderPeekHeader() reads
 /// these bytes and returns ESPIPE if there are not enough remaining bytes in data to read
-/// the entire header message, EINVAL if the first 8 bytes are not valid, ENODATA if the
+/// the entire header message, EINVAL if the first 8 bytes are not valid, ENOATTR if the
 /// Arrow end-of-stream indicator has been reached, or NANOARROW_OK otherwise.
 ArrowErrorCode ArrowIpcDecoderPeekHeader(struct ArrowIpcDecoder* decoder,
                                          struct ArrowBufferView data,

Added ports/maintain/ports/databases/py-snowflake-connector-python/pkg-descr.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
The Snowflake Connector for Python conforms to the Python DB API 2.0
specification.

The Snowflake Connector for Python provides an interface for developing Python
applications that can connect to Snowflake and perform all standard
operations. It provides a programming alternative to developing applications in
Java or C/C++ using the Snowflake JDBC or ODBC drivers.

The connector has no dependencies on JDBC or ODBC. It can be installed using pip
on Linux, Mac OSX, and Windows platforms where Python 3.8.0 (or higher) is
installed.

Added ports/maintain/ports/devel/py-dbt-extractor/Makefile.







































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
PORTNAME=	dbt-extractor
PORTVERSION=	0.5.1
CATEGORIES=	devel python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
DISTNAME=	dbt_extractor-${PORTVERSION}
DISTFILES=	${DISTNAME}.tar.gz

MAINTAINER=	pat@patmaddox.com
COMMENT=	Analyze and extract information from Jinja used in dbt projects
WWW=		https://github.com/dbt-labs/dbt-extractor/

LICENSE=	APACHE20

BUILD_DEPENDS=	maturin:devel/py-maturin@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}tree-sitter>0:devel/py-tree-sitter@${PY_FLAVOR}

USES=		cargo python
USE_PYTHON=	autoplist concurrent pep517

CARGO_BUILD=	no
CARGO_INSTALL=	no

post-extract:
	${LN} -s ${WRKDIR}/tree-sitter-jinja2-*/ ${WRKSRC}/tree-sitter-dbt-jinja

pre-build:
	cd ${WRKSRC} && maturin build -f
	${MKDIR} ${WRKSRC}/dist
	${CP} ${WRKSRC}/target/wheels/${DISTNAME}-*freebsd*.whl ${WRKSRC}/dist/

do-test:
	@(cd ${TEST_WRKSRC} && ${SETENV} ${TEST_ENV} PYO3_PYTHON=${PYTHON_CMD} ${CARGO} test)

.include <bsd.port.mk>

Added ports/maintain/ports/devel/py-dbt-extractor/Makefile.crates.

























































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
CARGO_CRATES=	aho-corasick-1.0.2 \
		autocfg-1.1.0 \
		bitflags-1.3.2 \
		cc-1.0.79 \
		cfg-if-1.0.0 \
		crossbeam-channel-0.5.8 \
		crossbeam-deque-0.8.3 \
		crossbeam-epoch-0.9.15 \
		crossbeam-utils-0.8.16 \
		either-1.8.1 \
		env_logger-0.8.4 \
		getrandom-0.2.10 \
		hermit-abi-0.3.2 \
		indoc-1.0.9 \
		libc-0.2.147 \
		lock_api-0.4.10 \
		log-0.4.19 \
		memchr-2.5.0 \
		memoffset-0.9.0 \
		num_cpus-1.16.0 \
		once_cell-1.18.0 \
		parking_lot-0.12.1 \
		parking_lot_core-0.9.8 \
		proc-macro2-1.0.66 \
		pyo3-0.19.1 \
		pyo3-build-config-0.19.1 \
		pyo3-ffi-0.19.1 \
		pyo3-macros-0.19.1 \
		pyo3-macros-backend-0.19.1 \
		quickcheck-1.0.3 \
		quickcheck_macros-1.0.0 \
		quote-1.0.31 \
		rand-0.8.5 \
		rand_core-0.6.4 \
		rayon-1.7.0 \
		rayon-core-1.11.0 \
		redox_syscall-0.3.5 \
		regex-1.9.1 \
		regex-automata-0.3.3 \
		regex-syntax-0.7.4 \
		scopeguard-1.2.0 \
		smallvec-1.11.0 \
		syn-1.0.109 \
		syn-2.0.26 \
		target-lexicon-0.12.10 \
		thiserror-1.0.43 \
		thiserror-impl-1.0.43 \
		tree-sitter-0.20.10 \
		unicode-ident-1.0.11 \
		unindent-0.1.11 \
		wasi-0.11.0+wasi-snapshot-preview1 \
		windows-targets-0.48.1 \
		windows_aarch64_gnullvm-0.48.0 \
		windows_aarch64_msvc-0.48.0 \
		windows_i686_gnu-0.48.0 \
		windows_i686_msvc-0.48.0 \
		windows_x86_64_gnu-0.48.0 \
		windows_x86_64_gnullvm-0.48.0 \
		windows_x86_64_msvc-0.48.0 \
		tree-sitter-jinja2@git+https://github.com/dbt-labs/tree-sitter-jinja2?tag=v0.2.0\#c9b092eff38bd6943254ad0373006d83c100a8c0

Added ports/maintain/ports/devel/py-dbt-extractor/distinfo.























































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
TIMESTAMP = 1701377668
SHA256 (dbt_extractor-0.5.1.tar.gz) = cd5d95576a8dea4190240aaf9936a37fd74b4b7913ca69a3c368fc4472bb7e13
SIZE (dbt_extractor-0.5.1.tar.gz) = 266278
SHA256 (rust/crates/aho-corasick-1.0.2.crate) = 43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41
SIZE (rust/crates/aho-corasick-1.0.2.crate) = 167694
SHA256 (rust/crates/autocfg-1.1.0.crate) = d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa
SIZE (rust/crates/autocfg-1.1.0.crate) = 13272
SHA256 (rust/crates/bitflags-1.3.2.crate) = bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a
SIZE (rust/crates/bitflags-1.3.2.crate) = 23021
SHA256 (rust/crates/cc-1.0.79.crate) = 50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f
SIZE (rust/crates/cc-1.0.79.crate) = 62624
SHA256 (rust/crates/cfg-if-1.0.0.crate) = baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd
SIZE (rust/crates/cfg-if-1.0.0.crate) = 7934
SHA256 (rust/crates/crossbeam-channel-0.5.8.crate) = a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200
SIZE (rust/crates/crossbeam-channel-0.5.8.crate) = 90455
SHA256 (rust/crates/crossbeam-deque-0.8.3.crate) = ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef
SIZE (rust/crates/crossbeam-deque-0.8.3.crate) = 21746
SHA256 (rust/crates/crossbeam-epoch-0.9.15.crate) = ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7
SIZE (rust/crates/crossbeam-epoch-0.9.15.crate) = 48553
SHA256 (rust/crates/crossbeam-utils-0.8.16.crate) = 5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294
SIZE (rust/crates/crossbeam-utils-0.8.16.crate) = 42508
SHA256 (rust/crates/either-1.8.1.crate) = 7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91
SIZE (rust/crates/either-1.8.1.crate) = 16027
SHA256 (rust/crates/env_logger-0.8.4.crate) = a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3
SIZE (rust/crates/env_logger-0.8.4.crate) = 33342
SHA256 (rust/crates/getrandom-0.2.10.crate) = be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427
SIZE (rust/crates/getrandom-0.2.10.crate) = 34955
SHA256 (rust/crates/hermit-abi-0.3.2.crate) = 443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b
SIZE (rust/crates/hermit-abi-0.3.2.crate) = 13783
SHA256 (rust/crates/indoc-1.0.9.crate) = bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306
SIZE (rust/crates/indoc-1.0.9.crate) = 13475
SHA256 (rust/crates/libc-0.2.147.crate) = b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3
SIZE (rust/crates/libc-0.2.147.crate) = 686772
SHA256 (rust/crates/lock_api-0.4.10.crate) = c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16
SIZE (rust/crates/lock_api-0.4.10.crate) = 26713
SHA256 (rust/crates/log-0.4.19.crate) = b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4
SIZE (rust/crates/log-0.4.19.crate) = 38073
SHA256 (rust/crates/memchr-2.5.0.crate) = 2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d
SIZE (rust/crates/memchr-2.5.0.crate) = 65812
SHA256 (rust/crates/memoffset-0.9.0.crate) = 5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c
SIZE (rust/crates/memoffset-0.9.0.crate) = 9033
SHA256 (rust/crates/num_cpus-1.16.0.crate) = 4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43
SIZE (rust/crates/num_cpus-1.16.0.crate) = 15713
SHA256 (rust/crates/once_cell-1.18.0.crate) = dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d
SIZE (rust/crates/once_cell-1.18.0.crate) = 32969
SHA256 (rust/crates/parking_lot-0.12.1.crate) = 3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f
SIZE (rust/crates/parking_lot-0.12.1.crate) = 40967
SHA256 (rust/crates/parking_lot_core-0.9.8.crate) = 93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447
SIZE (rust/crates/parking_lot_core-0.9.8.crate) = 32383
SHA256 (rust/crates/proc-macro2-1.0.66.crate) = 18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9
SIZE (rust/crates/proc-macro2-1.0.66.crate) = 43575
SHA256 (rust/crates/pyo3-0.19.1.crate) = ffb88ae05f306b4bfcde40ac4a51dc0b05936a9207a4b75b798c7729c4258a59
SIZE (rust/crates/pyo3-0.19.1.crate) = 418297
SHA256 (rust/crates/pyo3-build-config-0.19.1.crate) = 554db24f0b3c180a9c0b1268f91287ab3f17c162e15b54caaae5a6b3773396b0
SIZE (rust/crates/pyo3-build-config-0.19.1.crate) = 29205
SHA256 (rust/crates/pyo3-ffi-0.19.1.crate) = 922ede8759e8600ad4da3195ae41259654b9c55da4f7eec84a0ccc7d067a70a4
SIZE (rust/crates/pyo3-ffi-0.19.1.crate) = 64837
SHA256 (rust/crates/pyo3-macros-0.19.1.crate) = 8a5caec6a1dd355964a841fcbeeb1b89fe4146c87295573f94228911af3cc5a2
SIZE (rust/crates/pyo3-macros-0.19.1.crate) = 7173
SHA256 (rust/crates/pyo3-macros-backend-0.19.1.crate) = e0b78ccbb160db1556cdb6fd96c50334c5d4ec44dc5e0a968d0a1208fa0efa8b
SIZE (rust/crates/pyo3-macros-backend-0.19.1.crate) = 49916
SHA256 (rust/crates/quickcheck-1.0.3.crate) = 588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6
SIZE (rust/crates/quickcheck-1.0.3.crate) = 28069
SHA256 (rust/crates/quickcheck_macros-1.0.0.crate) = b22a693222d716a9587786f37ac3f6b4faedb5b80c23914e7303ff5a1d8016e9
SIZE (rust/crates/quickcheck_macros-1.0.0.crate) = 5275
SHA256 (rust/crates/quote-1.0.31.crate) = 5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0
SIZE (rust/crates/quote-1.0.31.crate) = 27881
SHA256 (rust/crates/rand-0.8.5.crate) = 34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404
SIZE (rust/crates/rand-0.8.5.crate) = 87113
SHA256 (rust/crates/rand_core-0.6.4.crate) = ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c
SIZE (rust/crates/rand_core-0.6.4.crate) = 22666
SHA256 (rust/crates/rayon-1.7.0.crate) = 1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b
SIZE (rust/crates/rayon-1.7.0.crate) = 169488
SHA256 (rust/crates/rayon-core-1.11.0.crate) = 4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d
SIZE (rust/crates/rayon-core-1.11.0.crate) = 73118
SHA256 (rust/crates/redox_syscall-0.3.5.crate) = 567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29
SIZE (rust/crates/redox_syscall-0.3.5.crate) = 23404
SHA256 (rust/crates/regex-1.9.1.crate) = b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575
SIZE (rust/crates/regex-1.9.1.crate) = 251978
SHA256 (rust/crates/regex-automata-0.3.3.crate) = 39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310
SIZE (rust/crates/regex-automata-0.3.3.crate) = 604301
SHA256 (rust/crates/regex-syntax-0.7.4.crate) = e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2
SIZE (rust/crates/regex-syntax-0.7.4.crate) = 343365
SHA256 (rust/crates/scopeguard-1.2.0.crate) = 94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49
SIZE (rust/crates/scopeguard-1.2.0.crate) = 11619
SHA256 (rust/crates/smallvec-1.11.0.crate) = 62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9
SIZE (rust/crates/smallvec-1.11.0.crate) = 34680
SHA256 (rust/crates/syn-1.0.109.crate) = 72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237
SIZE (rust/crates/syn-1.0.109.crate) = 237611
SHA256 (rust/crates/syn-2.0.26.crate) = 45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970
SIZE (rust/crates/syn-2.0.26.crate) = 241572
SHA256 (rust/crates/target-lexicon-0.12.10.crate) = 1d2faeef5759ab89935255b1a4cd98e0baf99d1085e37d36599c625dac49ae8e
SIZE (rust/crates/target-lexicon-0.12.10.crate) = 24544
SHA256 (rust/crates/thiserror-1.0.43.crate) = a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42
SIZE (rust/crates/thiserror-1.0.43.crate) = 18735
SHA256 (rust/crates/thiserror-impl-1.0.43.crate) = 463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f
SIZE (rust/crates/thiserror-impl-1.0.43.crate) = 15062
SHA256 (rust/crates/tree-sitter-0.20.10.crate) = e747b1f9b7b931ed39a548c1fae149101497de3c1fc8d9e18c62c1a66c683d3d
SIZE (rust/crates/tree-sitter-0.20.10.crate) = 134502
SHA256 (rust/crates/unicode-ident-1.0.11.crate) = 301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c
SIZE (rust/crates/unicode-ident-1.0.11.crate) = 42067
SHA256 (rust/crates/unindent-0.1.11.crate) = e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c
SIZE (rust/crates/unindent-0.1.11.crate) = 7700
SHA256 (rust/crates/wasi-0.11.0+wasi-snapshot-preview1.crate) = 9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423
SIZE (rust/crates/wasi-0.11.0+wasi-snapshot-preview1.crate) = 28131
SHA256 (rust/crates/windows-targets-0.48.1.crate) = 05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f
SIZE (rust/crates/windows-targets-0.48.1.crate) = 6902
SHA256 (rust/crates/windows_aarch64_gnullvm-0.48.0.crate) = 91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc
SIZE (rust/crates/windows_aarch64_gnullvm-0.48.0.crate) = 366543
SHA256 (rust/crates/windows_aarch64_msvc-0.48.0.crate) = b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3
SIZE (rust/crates/windows_aarch64_msvc-0.48.0.crate) = 671479
SHA256 (rust/crates/windows_i686_gnu-0.48.0.crate) = 622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241
SIZE (rust/crates/windows_i686_gnu-0.48.0.crate) = 741490
SHA256 (rust/crates/windows_i686_msvc-0.48.0.crate) = 4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00
SIZE (rust/crates/windows_i686_msvc-0.48.0.crate) = 730056
SHA256 (rust/crates/windows_x86_64_gnu-0.48.0.crate) = ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1
SIZE (rust/crates/windows_x86_64_gnu-0.48.0.crate) = 703595
SHA256 (rust/crates/windows_x86_64_gnullvm-0.48.0.crate) = 7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953
SIZE (rust/crates/windows_x86_64_gnullvm-0.48.0.crate) = 366536
SHA256 (rust/crates/windows_x86_64_msvc-0.48.0.crate) = 1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a
SIZE (rust/crates/windows_x86_64_msvc-0.48.0.crate) = 671422
SHA256 (dbt-labs-tree-sitter-jinja2-v0.2.0_GH0.tar.gz) = 29ac60e54a40e4bc43d30b1efcc9901ad8cb1328a74f02b3236b63ba7ebbb1ba
SIZE (dbt-labs-tree-sitter-jinja2-v0.2.0_GH0.tar.gz) = 24053

Added ports/maintain/ports/devel/py-dbt-extractor/pkg-descr.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
A tool that processes the most common jinja value templates in dbt model
files. The tool depends on tree-sitter and the tree-sitter-jinja2 library.

The current strategy is for this processor to be 100% certain when it can
accurately extract values from a given model file. Anything less than 100%
certainty returns an exception so that the model can be rendered with python
Jinja instead.

Added ports/maintain/ports/devel/py-hologram/Makefile.







































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
PORTNAME=	hologram
PORTVERSION=	0.0.16
CATEGORIES=	devel python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	pat@patmaddox.com
COMMENT=	JSON schema generation from dataclasses
WWW=		https://github.com/dbt-labs/hologram

LICENSE=	MIT

RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}dateutil>=2.8,<2.9:devel/py-dateutil@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}jsonschema>=3.0:devel/py-jsonschema@${PY_FLAVOR}

USES=		python
USE_PYTHON=	autoplist concurrent distutils

.include <bsd.port.mk>

Added ports/maintain/ports/devel/py-hologram/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1691524948
SHA256 (hologram-0.0.16.tar.gz) = 1c2c921b4e575361623ea0e0d0aa5aee377b1a333cc6c6a879e213ed34583e55
SIZE (hologram-0.0.16.tar.gz) = 18956

Added ports/maintain/ports/devel/py-hologram/pkg-descr.







>
>
>
1
2
3
Hologram is a lightweight library to generate Draft 7 compatible JSON Schemas
from python 3.7 dataclasses. Python 3.6 is supported through the dataclasses
backport. It is forked from dataclasses-jsonschema created by s-knibbs.

Added ports/maintain/ports/devel/py-mashumaro/Makefile.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
PORTNAME=	mashumaro
PORTVERSION=	3.11
CATEGORIES=	devel python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	pat@patmaddox.com
COMMENT=	Fast serialization library on top of dataclasses
WWW=		https://github.com/Fatal1ty/mashumaro

LICENSE=	APACHE20

RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}typing-extensions>=4.1.0:devel/py-typing-extensions@${PY_FLAVOR}

USES=		python
USE_PYTHON=	autoplist concurrent distutils

.include <bsd.port.mk>

Added ports/maintain/ports/devel/py-mashumaro/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1701245160
SHA256 (mashumaro-3.11.tar.gz) = b0b2443be4bdad29bb209d91fe4a2a918fbd7b63cccfeb457c7eeb567db02f5e
SIZE (mashumaro-3.11.tar.gz) = 123743

Added ports/maintain/ports/devel/py-mashumaro/pkg-descr.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
When using dataclasses, you often need to dump and load objects based on the
schema you have. Mashumaro not only lets you save and load things in different
ways, but it also does it super quick.

Key features:

- One of the fastest libraries
- Mature and time-tested
- Easy to use out of the box
- Highly customizable
- Built-in support for JSON, YAML, MessagePack, TOML
- Built-in support for almost all Python types including typing-extensions
- JSON Schema generation

Added ports/maintain/ports/devel/py-minimal-snowplow-tracker/Makefile.







































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
PORTNAME=	minimal-snowplow-tracker
PORTVERSION=	0.0.2
CATEGORIES=	devel python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	pat@patmaddox.com
COMMENT=	Snowplow event tracker. Add analytics to Python apps and webapps
WWW=		https://pypi.org/project/minimal-snowplow-tracker/

LICENSE=	APACHE20

RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}requests>=2.2.1,<3.0:www/py-requests@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}six>=1.9.0,<2.0:devel/py-six@${PY_FLAVOR}

USES=		python
USE_PYTHON=	autoplist concurrent distutils

.include <bsd.port.mk>

Added ports/maintain/ports/devel/py-minimal-snowplow-tracker/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1691524073
SHA256 (minimal-snowplow-tracker-0.0.2.tar.gz) = acabf7572db0e7f5cbf6983d495eef54081f71be392330eb3aadb9ccb39daaa4
SIZE (minimal-snowplow-tracker-0.0.2.tar.gz) = 12542

Added ports/maintain/ports/devel/py-minimal-snowplow-tracker/pkg-descr.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
Add analytics to your Python apps and Python games with the Snowplow event
tracker for Python.

With this tracker you can collect event data from your Python-based
applications, games or Python web servers/frameworks.

This is a fork of the original Snowplow Python tracker v0.8.0, with a number of
changes:
- All emitters except for the basic one removed
- Tests converted to docker
- All tracking event methods except for structured/unstructured removed
- Python 3.7 supported

Added ports/maintain/ports/devel/py-tree-sitter/Makefile.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
PORTNAME=	tree-sitter
PORTVERSION=	0.20.4
CATEGORIES=	devel python
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
DISTNAME=	tree_sitter-${PORTVERSION}

MAINTAINER=	pat@patmaddox.com
COMMENT=	Python bindings to the Tree-sitter parsing library
WWW=		https://github.com/tree-sitter/py-tree-sitter

LICENSE=	MIT

USES=		python
USE_PYTHON=	autoplist concurrent distutils

.include <bsd.port.mk>

Added ports/maintain/ports/devel/py-tree-sitter/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1701238318
SHA256 (tree_sitter-0.20.4.tar.gz) = 6adb123e2f3e56399bbf2359924633c882cc40ee8344885200bca0922f713be5
SIZE (tree_sitter-0.20.4.tar.gz) = 140726

Added ports/maintain/ports/devel/py-tree-sitter/pkg-descr.









>
>
>
>
1
2
3
4
py-tree-sitter provides Python bindings to the tree-sitter parsing library.

This package currently only works with Python 3. There are no library
dependencies, but you do need to have a C compiler installed.

Added ports/maintain/ports/textproc/ox-gfm.el/Makefile.

































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
PORTNAME=	ox-gfm.el
PORTVERSION=	1.0p20220910
CATEGORIES=	textproc

MAINTAINER=	pat@patmaddox.com
COMMENT=	Github Flavored Markdown exporter for Org Mode
WWW=		https://github.com/larstvei/ox-gfm

LICENSE=	NONE

USES=			emacs
EMACS_NO_DEPENDS=	yes

USE_GITHUB=	yes
GH_ACCOUNT=	larstvei
GH_PROJECT=	ox-gfm
GH_TAGNAME=	46faa67

NO_ARCH=	yes
NO_BUILD=	yes

OPTIONS_DEFINE=	DOCS

do-install:
	@${MKDIR} ${STAGEDIR}${PREFIX}/${EMACS_SITE_LISPDIR}
	${INSTALL_DATA} ${WRKSRC}/*.el ${STAGEDIR}${PREFIX}/${EMACS_SITE_LISPDIR}

post-install-DOCS-on:
	@${MKDIR} ${STAGEDIR}${DOCSDIR}
	${INSTALL_DATA} ${WRKSRC}/*.md ${STAGEDIR}${DOCSDIR}

.include <bsd.port.mk>

Added ports/maintain/ports/textproc/ox-gfm.el/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1702447167
SHA256 (larstvei-ox-gfm-1.0p20220910-46faa67_GH0.tar.gz) = 82736b4d1d3dfaa098aaabb97164d12e8d30b5c4ae239c03422be2bdcd7a5b6d
SIZE (larstvei-ox-gfm-1.0p20220910-46faa67_GH0.tar.gz) = 4748

Added ports/maintain/ports/textproc/ox-gfm.el/pkg-descr.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
This package adds an Org mode export backend for GitHub Flavored
Markdown.

Exporting to Github Flavored Markdown is available through Org mode's
export dispatcher once ox-gfm is loaded. Alternatively, exporting can
be triggered by calling the (autoloaded) function M-x
org-gfm-export-to-markdown.

Added ports/maintain/ports/textproc/ox-gfm.el/pkg-plist.





>
>
1
2
%%PORTDOCS%%%%DOCSDIR%%/README.md
%%EMACS_SITE_LISPDIR%%/ox-gfm.el

Added ports/maintain/submissions/Makefile.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
SUBMISSIONS!=	ls */Makefile | sed -e 's|/Makefile$$||'

all:
.for s in ${SUBMISSIONS}
.if !exists(${s}/gitmessage)
.error Missing ${s}/gitmessage
.endif
	make -C ${s}
.endfor

Added ports/maintain/submissions/ox-gfm.el/Makefile.







>
>
>
1
2
3
PORTS=		textproc/ox-gfm.el

.include "../submit.mk"

Added ports/maintain/submissions/ox-gfm.el/gitmessage.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
textproc/ox-gfm.el: new port

This package adds an Org mode export backend for GitHub Flavored
Markdown.

Exporting to Github Flavored Markdown is available through Org mode's
export dispatcher once ox-gfm is loaded. Alternatively, exporting can
be triggered by calling the (autoloaded) function M-x
org-gfm-export-to-markdown.

PR:	275740
Author:	Pat Maddox <pat@patmaddox.com>

Added ports/maintain/submissions/ox-gfm.el/ox-gfm.el.patch.





























































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
From cb9540e51ae947b8b0f5962d0dc4922b3880b99a Mon Sep 17 00:00:00 2001
From: Pat Maddox <pat@patmaddox.com>
Date: Tue, 12 Dec 2023 23:27:33 -0800
Subject: [PATCH] textproc/ox-gfm.el: new port

This package adds an Org mode export backend for GitHub Flavored
Markdown.

Exporting to Github Flavored Markdown is available through Org mode's
export dispatcher once ox-gfm is loaded. Alternatively, exporting can
be triggered by calling the (autoloaded) function M-x
org-gfm-export-to-markdown.

PR:	275740
Author:	Pat Maddox <pat@patmaddox.com>
---
 textproc/Makefile            |  1 +
 textproc/ox-gfm.el/Makefile  | 32 ++++++++++++++++++++++++++++++++
 textproc/ox-gfm.el/distinfo  |  3 +++
 textproc/ox-gfm.el/pkg-descr |  7 +++++++
 textproc/ox-gfm.el/pkg-plist |  2 ++
 5 files changed, 45 insertions(+)
 create mode 100644 textproc/ox-gfm.el/Makefile
 create mode 100644 textproc/ox-gfm.el/distinfo
 create mode 100644 textproc/ox-gfm.el/pkg-descr
 create mode 100644 textproc/ox-gfm.el/pkg-plist

diff --git a/textproc/Makefile b/textproc/Makefile
index 5155ac8446a4..204525150374 100644
--- a/textproc/Makefile
+++ b/textproc/Makefile
@@ -526,6 +526,7 @@
     SUBDIR += openvanilla-framework
     SUBDIR += or-aspell
     SUBDIR += ov
+    SUBDIR += ox-gfm.el
     SUBDIR += p5-AI-Categorizer
     SUBDIR += p5-Algorithm-CheckDigits
     SUBDIR += p5-Algorithm-RabinKarp
diff --git a/textproc/ox-gfm.el/Makefile b/textproc/ox-gfm.el/Makefile
new file mode 100644
index 000000000000..927173e48956
--- /dev/null
+++ b/textproc/ox-gfm.el/Makefile
@@ -0,0 +1,32 @@
+PORTNAME=	ox-gfm.el
+PORTVERSION=	1.0p20220910
+CATEGORIES=	textproc
+
+MAINTAINER=	pat@patmaddox.com
+COMMENT=	Github Flavored Markdown exporter for Org Mode
+WWW=		https://github.com/larstvei/ox-gfm
+
+LICENSE=	NONE
+
+USES=			emacs
+EMACS_NO_DEPENDS=	yes
+
+USE_GITHUB=	yes
+GH_ACCOUNT=	larstvei
+GH_PROJECT=	ox-gfm
+GH_TAGNAME=	46faa67
+
+NO_ARCH=	yes
+NO_BUILD=	yes
+
+OPTIONS_DEFINE=	DOCS
+
+do-install:
+	@${MKDIR} ${STAGEDIR}${PREFIX}/${EMACS_SITE_LISPDIR}
+	${INSTALL_DATA} ${WRKSRC}/*.el ${STAGEDIR}${PREFIX}/${EMACS_SITE_LISPDIR}
+
+post-install-DOCS-on:
+	@${MKDIR} ${STAGEDIR}${DOCSDIR}
+	${INSTALL_DATA} ${WRKSRC}/*.md ${STAGEDIR}${DOCSDIR}
+
+.include <bsd.port.mk>
diff --git a/textproc/ox-gfm.el/distinfo b/textproc/ox-gfm.el/distinfo
new file mode 100644
index 000000000000..c9ab75f4ec72
--- /dev/null
+++ b/textproc/ox-gfm.el/distinfo
@@ -0,0 +1,3 @@
+TIMESTAMP = 1702447167
+SHA256 (larstvei-ox-gfm-1.0p20220910-46faa67_GH0.tar.gz) = 82736b4d1d3dfaa098aaabb97164d12e8d30b5c4ae239c03422be2bdcd7a5b6d
+SIZE (larstvei-ox-gfm-1.0p20220910-46faa67_GH0.tar.gz) = 4748
diff --git a/textproc/ox-gfm.el/pkg-descr b/textproc/ox-gfm.el/pkg-descr
new file mode 100644
index 000000000000..8275a2d3ff95
--- /dev/null
+++ b/textproc/ox-gfm.el/pkg-descr
@@ -0,0 +1,7 @@
+This package adds an Org mode export backend for GitHub Flavored
+Markdown.
+
+Exporting to Github Flavored Markdown is available through Org mode's
+export dispatcher once ox-gfm is loaded. Alternatively, exporting can
+be triggered by calling the (autoloaded) function M-x
+org-gfm-export-to-markdown.
diff --git a/textproc/ox-gfm.el/pkg-plist b/textproc/ox-gfm.el/pkg-plist
new file mode 100644
index 000000000000..1d3db5b6c911
--- /dev/null
+++ b/textproc/ox-gfm.el/pkg-plist
@@ -0,0 +1,2 @@
+%%PORTDOCS%%%%DOCSDIR%%/README.md
+%%EMACS_SITE_LISPDIR%%/ox-gfm.el
-- 
2.43.0

Added ports/maintain/submissions/py-dbt-core/Makefile.









>
>
>
>
1
2
3
4
PORTS=		databases/py-dbt-core devel/py-dbt-extractor databases/py-dbt-semantic-interfaces
TESTPORTS=	databases/py-dbt-snowflake

.include "../submit.mk"

Added ports/maintain/submissions/py-dbt-core/gitmessage.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
databases/py-dbt-core: Update to 1.7.3 (and dependencies)

Changes: https://github.com/dbt-labs/dbt-core/blob/v1.7.3/CHANGELOG.md

databases/py-dbt-semantic-interfaces: Update to 0.4.1
Changes: https://github.com/dbt-labs/dbt-semantic-interfaces/blob/v0.4.1/CHANGELOG.md

devel/py-dbt-extractor: Update to 0.5.1
Changes: https://github.com/dbt-labs/dbt-extractor/blob/main/CHANGELOG.md#051

PR: 275583
Approved by: submitter is maintainer

Added ports/maintain/submissions/py-dbt-core/py-dbt-core.patch.































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
From 69cebeb8d1f8a48076364317b59202aa783f0244 Mon Sep 17 00:00:00 2001
From: Pat Maddox <pat@patmaddox.com>
Date: Thu, 7 Dec 2023 01:45:15 -0800
Subject: [PATCH] databases/py-dbt-core: Update to 1.7.3 (and dependencies)

Changes: https://github.com/dbt-labs/dbt-core/blob/v1.7.3/CHANGELOG.md

databases/py-dbt-semantic-interfaces: Update to 0.4.1
Changes: https://github.com/dbt-labs/dbt-semantic-interfaces/blob/v0.4.1/CHANGELOG.md

devel/py-dbt-extractor: Update to 0.5.1
Changes: https://github.com/dbt-labs/dbt-extractor/blob/main/CHANGELOG.md#051

PR: 275583
Approved by: submitter is maintainer
---
 databases/py-dbt-core/Makefile                |  22 +-
 databases/py-dbt-core/distinfo                |   6 +-
 databases/py-dbt-semantic-interfaces/Makefile |  12 +-
 databases/py-dbt-semantic-interfaces/distinfo |   6 +-
 devel/py-dbt-extractor/Makefile               |   3 +-
 devel/py-dbt-extractor/Makefile.crates        | 107 ++++-----
 devel/py-dbt-extractor/distinfo               | 220 +++++++++---------
 7 files changed, 192 insertions(+), 184 deletions(-)

diff --git a/databases/py-dbt-core/Makefile b/databases/py-dbt-core/Makefile
index ab50c57d2e6b..2c0f297933de 100644
--- a/databases/py-dbt-core/Makefile
+++ b/databases/py-dbt-core/Makefile
@@ -1,5 +1,5 @@
 PORTNAME=	dbt-core
-PORTVERSION=	1.6.0
+PORTVERSION=	1.7.3
 CATEGORIES=	databases python
 MASTER_SITES=	PYPI
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
@@ -12,28 +12,28 @@ LICENSE=	APACHE20
 
 RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}agate>=1.7,<2:textproc/py-agate@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}Jinja2>=3.1.2,<4:devel/py-Jinja2@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}mashumaro>=3.8.1,<4:devel/py-mashumaro@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}msgpack>0:devel/py-msgpack@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}mashumaro>=3.9,<4:devel/py-mashumaro@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}msgpack>0.5.6:devel/py-msgpack@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}logbook>0:devel/py-logbook@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}click<9:devel/py-click@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}click>=8.0.2,<9:devel/py-click@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}networkx>=2.3:math/py-networkx@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}colorama>=0.3.9,<0.5:devel/py-colorama@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}pathspec>=0.9,<0.12:devel/py-pathspec@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}isodate>=0.6,<0.7:devel/py-isodate@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}sqlparse>=0.2.3,<0.5:databases/py-sqlparse@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}dbt-extractor>=0.4.1,<-0.5:devel/py-dbt-extractor@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}hologram>=0.0.16,0.1:devel/py-hologram@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}dbt-extractor>=0.5:devel/py-dbt-extractor@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}minimal-snowplow-tracker>=0.0.2,<0.1:devel/py-minimal-snowplow-tracker@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}dbt-semantic-interfaces>=0.2.0,<0.3:databases/py-dbt-semantic-interfaces@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}dbt-semantic-interfaces>=0.4.0,<0.5:databases/py-dbt-semantic-interfaces@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}jsonschema>=3.0:devel/py-jsonschema@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}packaging>20.9:devel/py-packaging@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}protobuf>=4.0.0:devel/py-protobuf@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}pytz>=2015.7:devel/py-pytz@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}yaml>=6.0:devel/py-yaml@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}typing-extensions>=3.7.4:devel/py-typing-extensions@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}cffi>0:devel/py-cffi@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}idna>0:dns/py-idna@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}requests>0:www/py-requests@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}urllib3>0:net/py-urllib3@${PY_FLAVOR}
+		${PYTHON_PKGNAMEPREFIX}cffi>=1.9,<2:devel/py-cffi@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}idna>=2.5,<4:dns/py-idna@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}requests<3:www/py-requests@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}urllib3>=1.0:net/py-urllib3@${PY_FLAVOR}
 
 USES=		python
 USE_PYTHON=	autoplist concurrent distutils
diff --git a/databases/py-dbt-core/distinfo b/databases/py-dbt-core/distinfo
index 0569abaa9854..db9f705948e8 100644
--- a/databases/py-dbt-core/distinfo
+++ b/databases/py-dbt-core/distinfo
@@ -1,3 +1,3 @@
-TIMESTAMP = 1691520002
-SHA256 (dbt-core-1.6.0.tar.gz) = 51da239af02ed449562d3c5caea903edee7b5427c47efd5a6439634c59858087
-SIZE (dbt-core-1.6.0.tar.gz) = 889429
+TIMESTAMP = 1701374624
+SHA256 (dbt-core-1.7.3.tar.gz) = a8f06203cd003ebcdda2af953f60c2b44fc0c69bd6cba79338225daedb1f41e1
+SIZE (dbt-core-1.7.3.tar.gz) = 913570
diff --git a/databases/py-dbt-semantic-interfaces/Makefile b/databases/py-dbt-semantic-interfaces/Makefile
index e7b5dca287e9..4a9d9a8acc2b 100644
--- a/databases/py-dbt-semantic-interfaces/Makefile
+++ b/databases/py-dbt-semantic-interfaces/Makefile
@@ -1,5 +1,5 @@
 PORTNAME=	dbt-semantic-interfaces
-PORTVERSION=	0.2.0
+PORTVERSION=	0.4.1
 CATEGORIES=	databases python
 MASTER_SITES=	PYPI
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
@@ -15,14 +15,14 @@ BUILD_DEPENDS=	hatch:devel/py-hatch@${PY_FLAVOR} \
 		hatchling:devel/py-hatchling@${PY_FLAVOR}
 
 RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}pydantic>=1.10,<2:devel/py-pydantic@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}jsonschema>=3:devel/py-jsonschema@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}jsonschema>=4,<5:devel/py-jsonschema@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}yaml>=6,<7:devel/py-yaml@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}more-itertools>=8:devel/py-more-itertools@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}more-itertools>=8,<11:devel/py-more-itertools@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}Jinja2>=3,<4:devel/py-Jinja2@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}click>=7,<9:devel/py-click@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}dateutil>0:devel/py-dateutil@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}importlib-metadata>=6,<7:devel/py-importlib-metadata@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}typing-extensions>=4,<5:devel/py-typing-extensions@${PY_FLAVOR}
+		${PYTHON_PKGNAMEPREFIX}dateutil>=2,<3:devel/py-dateutil@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}importlib-metadata>=6:devel/py-importlib-metadata@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}typing-extensions>=4.4,<5:devel/py-typing-extensions@${PY_FLAVOR}
 
 USES=		python
 USE_PYTHON=	autoplist concurrent pep517
diff --git a/databases/py-dbt-semantic-interfaces/distinfo b/databases/py-dbt-semantic-interfaces/distinfo
index dd479ea1f4a5..c9d68933a768 100644
--- a/databases/py-dbt-semantic-interfaces/distinfo
+++ b/databases/py-dbt-semantic-interfaces/distinfo
@@ -1,3 +1,3 @@
-TIMESTAMP = 1691527201
-SHA256 (dbt_semantic_interfaces-0.2.0.tar.gz) = 4afecc6dc56f85ecce7ac424dea07561513467e0bf28ead7307b78a22e6b999b
-SIZE (dbt_semantic_interfaces-0.2.0.tar.gz) = 69838
+TIMESTAMP = 1701246479
+SHA256 (dbt_semantic_interfaces-0.4.1.tar.gz) = d79cb4b5e5103099874530735a117db61f7dcbb1a774f9ce65068c24cb7e874e
+SIZE (dbt_semantic_interfaces-0.4.1.tar.gz) = 72398
diff --git a/devel/py-dbt-extractor/Makefile b/devel/py-dbt-extractor/Makefile
index f7a45a7cccd9..8ec2b0672cd5 100644
--- a/devel/py-dbt-extractor/Makefile
+++ b/devel/py-dbt-extractor/Makefile
@@ -1,6 +1,5 @@
 PORTNAME=	dbt-extractor
-PORTVERSION=	0.4.1
-PORTREVISION=	1
+PORTVERSION=	0.5.1
 CATEGORIES=	devel python
 MASTER_SITES=	PYPI
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
diff --git a/devel/py-dbt-extractor/Makefile.crates b/devel/py-dbt-extractor/Makefile.crates
index f73adc1ea048..b7b31f3be67a 100644
--- a/devel/py-dbt-extractor/Makefile.crates
+++ b/devel/py-dbt-extractor/Makefile.crates
@@ -1,57 +1,60 @@
-CARGO_CRATES=	aho-corasick-0.7.18 \
-		autocfg-1.0.1 \
+CARGO_CRATES=	aho-corasick-1.0.2 \
+		autocfg-1.1.0 \
 		bitflags-1.3.2 \
-		cc-1.0.69 \
+		cc-1.0.79 \
 		cfg-if-1.0.0 \
-		crossbeam-channel-0.5.1 \
-		crossbeam-deque-0.8.1 \
-		crossbeam-epoch-0.9.5 \
-		crossbeam-utils-0.8.5 \
-		either-1.6.1 \
+		crossbeam-channel-0.5.8 \
+		crossbeam-deque-0.8.3 \
+		crossbeam-epoch-0.9.15 \
+		crossbeam-utils-0.8.16 \
+		either-1.8.1 \
 		env_logger-0.8.4 \
-		getrandom-0.2.3 \
-		hermit-abi-0.1.19 \
-		indoc-0.3.6 \
-		indoc-impl-0.3.6 \
-		instant-0.1.10 \
-		lazy_static-1.4.0 \
-		libc-0.2.101 \
-		lock_api-0.4.4 \
-		log-0.4.14 \
-		memchr-2.4.1 \
-		memoffset-0.6.4 \
-		num_cpus-1.13.0 \
-		once_cell-1.9.0 \
-		parking_lot-0.11.1 \
-		parking_lot_core-0.8.3 \
-		paste-0.1.18 \
-		paste-impl-0.1.18 \
-		proc-macro-hack-0.5.19 \
-		proc-macro2-1.0.28 \
-		pyo3-0.15.1 \
-		pyo3-build-config-0.15.1 \
-		pyo3-macros-0.15.1 \
-		pyo3-macros-backend-0.15.1 \
+		getrandom-0.2.10 \
+		hermit-abi-0.3.2 \
+		indoc-1.0.9 \
+		libc-0.2.147 \
+		lock_api-0.4.10 \
+		log-0.4.19 \
+		memchr-2.5.0 \
+		memoffset-0.9.0 \
+		num_cpus-1.16.0 \
+		once_cell-1.18.0 \
+		parking_lot-0.12.1 \
+		parking_lot_core-0.9.8 \
+		proc-macro2-1.0.66 \
+		pyo3-0.19.1 \
+		pyo3-build-config-0.19.1 \
+		pyo3-ffi-0.19.1 \
+		pyo3-macros-0.19.1 \
+		pyo3-macros-backend-0.19.1 \
 		quickcheck-1.0.3 \
 		quickcheck_macros-1.0.0 \
-		quote-1.0.9 \
-		rand-0.8.4 \
-		rand_core-0.6.3 \
-		rayon-1.5.1 \
-		rayon-core-1.9.1 \
-		redox_syscall-0.2.10 \
-		regex-1.5.4 \
-		regex-syntax-0.6.25 \
-		scopeguard-1.1.0 \
-		smallvec-1.6.1 \
-		syn-1.0.75 \
-		thiserror-1.0.26 \
-		thiserror-impl-1.0.26 \
-		tree-sitter-0.19.5 \
-		unicode-xid-0.2.2 \
-		unindent-0.1.7 \
-		wasi-0.10.2+wasi-snapshot-preview1 \
-		winapi-0.3.9 \
-		winapi-i686-pc-windows-gnu-0.4.0 \
-		winapi-x86_64-pc-windows-gnu-0.4.0 \
-		tree-sitter-jinja2@git+https://github.com/dbt-labs/tree-sitter-jinja2?tag=v0.1.0\#52da7b0b1480b23381ea84cf5ea3bf058dd6d8c4
+		quote-1.0.31 \
+		rand-0.8.5 \
+		rand_core-0.6.4 \
+		rayon-1.7.0 \
+		rayon-core-1.11.0 \
+		redox_syscall-0.3.5 \
+		regex-1.9.1 \
+		regex-automata-0.3.3 \
+		regex-syntax-0.7.4 \
+		scopeguard-1.2.0 \
+		smallvec-1.11.0 \
+		syn-1.0.109 \
+		syn-2.0.26 \
+		target-lexicon-0.12.10 \
+		thiserror-1.0.43 \
+		thiserror-impl-1.0.43 \
+		tree-sitter-0.20.10 \
+		unicode-ident-1.0.11 \
+		unindent-0.1.11 \
+		wasi-0.11.0+wasi-snapshot-preview1 \
+		windows-targets-0.48.1 \
+		windows_aarch64_gnullvm-0.48.0 \
+		windows_aarch64_msvc-0.48.0 \
+		windows_i686_gnu-0.48.0 \
+		windows_i686_msvc-0.48.0 \
+		windows_x86_64_gnu-0.48.0 \
+		windows_x86_64_gnullvm-0.48.0 \
+		windows_x86_64_msvc-0.48.0 \
+		tree-sitter-jinja2@git+https://github.com/dbt-labs/tree-sitter-jinja2?tag=v0.2.0\#c9b092eff38bd6943254ad0373006d83c100a8c0
diff --git a/devel/py-dbt-extractor/distinfo b/devel/py-dbt-extractor/distinfo
index babcf06ef92c..028e68ccd980 100644
--- a/devel/py-dbt-extractor/distinfo
+++ b/devel/py-dbt-extractor/distinfo
@@ -1,117 +1,123 @@
-TIMESTAMP = 1691560041
-SHA256 (dbt_extractor-0.4.1.tar.gz) = 75b1c665699ec0f1ffce1ba3d776f7dfce802156f22e70a7b9c8f0b4d7e80f42
-SIZE (dbt_extractor-0.4.1.tar.gz) = 264647
-SHA256 (rust/crates/aho-corasick-0.7.18.crate) = 1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f
-SIZE (rust/crates/aho-corasick-0.7.18.crate) = 112923
-SHA256 (rust/crates/autocfg-1.0.1.crate) = cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a
-SIZE (rust/crates/autocfg-1.0.1.crate) = 12908
+TIMESTAMP = 1701377668
+SHA256 (dbt_extractor-0.5.1.tar.gz) = cd5d95576a8dea4190240aaf9936a37fd74b4b7913ca69a3c368fc4472bb7e13
+SIZE (dbt_extractor-0.5.1.tar.gz) = 266278
+SHA256 (rust/crates/aho-corasick-1.0.2.crate) = 43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41
+SIZE (rust/crates/aho-corasick-1.0.2.crate) = 167694
+SHA256 (rust/crates/autocfg-1.1.0.crate) = d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa
+SIZE (rust/crates/autocfg-1.1.0.crate) = 13272
 SHA256 (rust/crates/bitflags-1.3.2.crate) = bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a
 SIZE (rust/crates/bitflags-1.3.2.crate) = 23021
-SHA256 (rust/crates/cc-1.0.69.crate) = e70cc2f62c6ce1868963827bd677764c62d07c3d9a3e1fb1177ee1a9ab199eb2
-SIZE (rust/crates/cc-1.0.69.crate) = 56044
+SHA256 (rust/crates/cc-1.0.79.crate) = 50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f
+SIZE (rust/crates/cc-1.0.79.crate) = 62624
 SHA256 (rust/crates/cfg-if-1.0.0.crate) = baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd
 SIZE (rust/crates/cfg-if-1.0.0.crate) = 7934
-SHA256 (rust/crates/crossbeam-channel-0.5.1.crate) = 06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4
-SIZE (rust/crates/crossbeam-channel-0.5.1.crate) = 86919
-SHA256 (rust/crates/crossbeam-deque-0.8.1.crate) = 6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e
-SIZE (rust/crates/crossbeam-deque-0.8.1.crate) = 20852
-SHA256 (rust/crates/crossbeam-epoch-0.9.5.crate) = 4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd
-SIZE (rust/crates/crossbeam-epoch-0.9.5.crate) = 43883
-SHA256 (rust/crates/crossbeam-utils-0.8.5.crate) = d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db
-SIZE (rust/crates/crossbeam-utils-0.8.5.crate) = 38414
-SHA256 (rust/crates/either-1.6.1.crate) = e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457
-SIZE (rust/crates/either-1.6.1.crate) = 13641
+SHA256 (rust/crates/crossbeam-channel-0.5.8.crate) = a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200
+SIZE (rust/crates/crossbeam-channel-0.5.8.crate) = 90455
+SHA256 (rust/crates/crossbeam-deque-0.8.3.crate) = ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef
+SIZE (rust/crates/crossbeam-deque-0.8.3.crate) = 21746
+SHA256 (rust/crates/crossbeam-epoch-0.9.15.crate) = ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7
+SIZE (rust/crates/crossbeam-epoch-0.9.15.crate) = 48553
+SHA256 (rust/crates/crossbeam-utils-0.8.16.crate) = 5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294
+SIZE (rust/crates/crossbeam-utils-0.8.16.crate) = 42508
+SHA256 (rust/crates/either-1.8.1.crate) = 7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91
+SIZE (rust/crates/either-1.8.1.crate) = 16027
 SHA256 (rust/crates/env_logger-0.8.4.crate) = a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3
 SIZE (rust/crates/env_logger-0.8.4.crate) = 33342
-SHA256 (rust/crates/getrandom-0.2.3.crate) = 7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753
-SIZE (rust/crates/getrandom-0.2.3.crate) = 26261
-SHA256 (rust/crates/hermit-abi-0.1.19.crate) = 62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33
-SIZE (rust/crates/hermit-abi-0.1.19.crate) = 9979
-SHA256 (rust/crates/indoc-0.3.6.crate) = 47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8
-SIZE (rust/crates/indoc-0.3.6.crate) = 9663
-SHA256 (rust/crates/indoc-impl-0.3.6.crate) = ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0
-SIZE (rust/crates/indoc-impl-0.3.6.crate) = 7933
-SHA256 (rust/crates/instant-0.1.10.crate) = bee0328b1209d157ef001c94dd85b4f8f64139adb0eac2659f4b08382b2f474d
-SIZE (rust/crates/instant-0.1.10.crate) = 5218
-SHA256 (rust/crates/lazy_static-1.4.0.crate) = e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646
-SIZE (rust/crates/lazy_static-1.4.0.crate) = 10443
-SHA256 (rust/crates/libc-0.2.101.crate) = 3cb00336871be5ed2c8ed44b60ae9959dc5b9f08539422ed43f09e34ecaeba21
-SIZE (rust/crates/libc-0.2.101.crate) = 530003
-SHA256 (rust/crates/lock_api-0.4.4.crate) = 0382880606dff6d15c9476c416d18690b72742aa7b605bb6dd6ec9030fbf07eb
-SIZE (rust/crates/lock_api-0.4.4.crate) = 20356
-SHA256 (rust/crates/log-0.4.14.crate) = 51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710
-SIZE (rust/crates/log-0.4.14.crate) = 34582
-SHA256 (rust/crates/memchr-2.4.1.crate) = 308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a
-SIZE (rust/crates/memchr-2.4.1.crate) = 64977
-SHA256 (rust/crates/memoffset-0.6.4.crate) = 59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9
-SIZE (rust/crates/memoffset-0.6.4.crate) = 7664
-SHA256 (rust/crates/num_cpus-1.13.0.crate) = 05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3
-SIZE (rust/crates/num_cpus-1.13.0.crate) = 14704
-SHA256 (rust/crates/once_cell-1.9.0.crate) = da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5
-SIZE (rust/crates/once_cell-1.9.0.crate) = 30702
-SHA256 (rust/crates/parking_lot-0.11.1.crate) = 6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb
-SIZE (rust/crates/parking_lot-0.11.1.crate) = 39854
-SHA256 (rust/crates/parking_lot_core-0.8.3.crate) = fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018
-SIZE (rust/crates/parking_lot_core-0.8.3.crate) = 32147
-SHA256 (rust/crates/paste-0.1.18.crate) = 45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880
-SIZE (rust/crates/paste-0.1.18.crate) = 12259
-SHA256 (rust/crates/paste-impl-0.1.18.crate) = d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6
-SIZE (rust/crates/paste-impl-0.1.18.crate) = 9451
-SHA256 (rust/crates/proc-macro-hack-0.5.19.crate) = dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5
-SIZE (rust/crates/proc-macro-hack-0.5.19.crate) = 15556
-SHA256 (rust/crates/proc-macro2-1.0.28.crate) = 5c7ed8b8c7b886ea3ed7dde405212185f423ab44682667c8c6dd14aa1d9f6612
-SIZE (rust/crates/proc-macro2-1.0.28.crate) = 38732
-SHA256 (rust/crates/pyo3-0.15.1.crate) = 7cf01dbf1c05af0a14c7779ed6f3aa9deac9c3419606ac9de537a2d649005720
-SIZE (rust/crates/pyo3-0.15.1.crate) = 370722
-SHA256 (rust/crates/pyo3-build-config-0.15.1.crate) = dbf9e4d128bfbddc898ad3409900080d8d5095c379632fbbfbb9c8cfb1fb852b
-SIZE (rust/crates/pyo3-build-config-0.15.1.crate) = 22095
-SHA256 (rust/crates/pyo3-macros-0.15.1.crate) = 67701eb32b1f9a9722b4bc54b548ff9d7ebfded011c12daece7b9063be1fd755
-SIZE (rust/crates/pyo3-macros-0.15.1.crate) = 7555
-SHA256 (rust/crates/pyo3-macros-backend-0.15.1.crate) = f44f09e825ee49a105f2c7b23ebee50886a9aee0746f4dd5a704138a64b0218a
-SIZE (rust/crates/pyo3-macros-backend-0.15.1.crate) = 46111
+SHA256 (rust/crates/getrandom-0.2.10.crate) = be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427
+SIZE (rust/crates/getrandom-0.2.10.crate) = 34955
+SHA256 (rust/crates/hermit-abi-0.3.2.crate) = 443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b
+SIZE (rust/crates/hermit-abi-0.3.2.crate) = 13783
+SHA256 (rust/crates/indoc-1.0.9.crate) = bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306
+SIZE (rust/crates/indoc-1.0.9.crate) = 13475
+SHA256 (rust/crates/libc-0.2.147.crate) = b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3
+SIZE (rust/crates/libc-0.2.147.crate) = 686772
+SHA256 (rust/crates/lock_api-0.4.10.crate) = c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16
+SIZE (rust/crates/lock_api-0.4.10.crate) = 26713
+SHA256 (rust/crates/log-0.4.19.crate) = b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4
+SIZE (rust/crates/log-0.4.19.crate) = 38073
+SHA256 (rust/crates/memchr-2.5.0.crate) = 2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d
+SIZE (rust/crates/memchr-2.5.0.crate) = 65812
+SHA256 (rust/crates/memoffset-0.9.0.crate) = 5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c
+SIZE (rust/crates/memoffset-0.9.0.crate) = 9033
+SHA256 (rust/crates/num_cpus-1.16.0.crate) = 4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43
+SIZE (rust/crates/num_cpus-1.16.0.crate) = 15713
+SHA256 (rust/crates/once_cell-1.18.0.crate) = dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d
+SIZE (rust/crates/once_cell-1.18.0.crate) = 32969
+SHA256 (rust/crates/parking_lot-0.12.1.crate) = 3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f
+SIZE (rust/crates/parking_lot-0.12.1.crate) = 40967
+SHA256 (rust/crates/parking_lot_core-0.9.8.crate) = 93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447
+SIZE (rust/crates/parking_lot_core-0.9.8.crate) = 32383
+SHA256 (rust/crates/proc-macro2-1.0.66.crate) = 18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9
+SIZE (rust/crates/proc-macro2-1.0.66.crate) = 43575
+SHA256 (rust/crates/pyo3-0.19.1.crate) = ffb88ae05f306b4bfcde40ac4a51dc0b05936a9207a4b75b798c7729c4258a59
+SIZE (rust/crates/pyo3-0.19.1.crate) = 418297
+SHA256 (rust/crates/pyo3-build-config-0.19.1.crate) = 554db24f0b3c180a9c0b1268f91287ab3f17c162e15b54caaae5a6b3773396b0
+SIZE (rust/crates/pyo3-build-config-0.19.1.crate) = 29205
+SHA256 (rust/crates/pyo3-ffi-0.19.1.crate) = 922ede8759e8600ad4da3195ae41259654b9c55da4f7eec84a0ccc7d067a70a4
+SIZE (rust/crates/pyo3-ffi-0.19.1.crate) = 64837
+SHA256 (rust/crates/pyo3-macros-0.19.1.crate) = 8a5caec6a1dd355964a841fcbeeb1b89fe4146c87295573f94228911af3cc5a2
+SIZE (rust/crates/pyo3-macros-0.19.1.crate) = 7173
+SHA256 (rust/crates/pyo3-macros-backend-0.19.1.crate) = e0b78ccbb160db1556cdb6fd96c50334c5d4ec44dc5e0a968d0a1208fa0efa8b
+SIZE (rust/crates/pyo3-macros-backend-0.19.1.crate) = 49916
 SHA256 (rust/crates/quickcheck-1.0.3.crate) = 588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6
 SIZE (rust/crates/quickcheck-1.0.3.crate) = 28069
 SHA256 (rust/crates/quickcheck_macros-1.0.0.crate) = b22a693222d716a9587786f37ac3f6b4faedb5b80c23914e7303ff5a1d8016e9
 SIZE (rust/crates/quickcheck_macros-1.0.0.crate) = 5275
-SHA256 (rust/crates/quote-1.0.9.crate) = c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7
-SIZE (rust/crates/quote-1.0.9.crate) = 25042
-SHA256 (rust/crates/rand-0.8.4.crate) = 2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8
-SIZE (rust/crates/rand-0.8.4.crate) = 87406
-SHA256 (rust/crates/rand_core-0.6.3.crate) = d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7
-SIZE (rust/crates/rand_core-0.6.3.crate) = 21938
-SHA256 (rust/crates/rayon-1.5.1.crate) = c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90
-SIZE (rust/crates/rayon-1.5.1.crate) = 160424
-SHA256 (rust/crates/rayon-core-1.9.1.crate) = d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e
-SIZE (rust/crates/rayon-core-1.9.1.crate) = 64961
-SHA256 (rust/crates/redox_syscall-0.2.10.crate) = 8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff
-SIZE (rust/crates/redox_syscall-0.2.10.crate) = 23582
-SHA256 (rust/crates/regex-1.5.4.crate) = d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461
-SIZE (rust/crates/regex-1.5.4.crate) = 236581
-SHA256 (rust/crates/regex-syntax-0.6.25.crate) = f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b
-SIZE (rust/crates/regex-syntax-0.6.25.crate) = 293293
-SHA256 (rust/crates/scopeguard-1.1.0.crate) = d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd
-SIZE (rust/crates/scopeguard-1.1.0.crate) = 11470
-SHA256 (rust/crates/smallvec-1.6.1.crate) = fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e
-SIZE (rust/crates/smallvec-1.6.1.crate) = 26444
-SHA256 (rust/crates/syn-1.0.75.crate) = b7f58f7e8eaa0009c5fec437aabf511bd9933e4b2d7407bd05273c01a8906ea7
-SIZE (rust/crates/syn-1.0.75.crate) = 233068
-SHA256 (rust/crates/thiserror-1.0.26.crate) = 93119e4feac1cbe6c798c34d3a53ea0026b0b1de6a120deef895137c0529bfe2
-SIZE (rust/crates/thiserror-1.0.26.crate) = 16711
-SHA256 (rust/crates/thiserror-impl-1.0.26.crate) = 060d69a0afe7796bf42e9e2ff91f5ee691fb15c53d38b4b62a9a53eb23164745
-SIZE (rust/crates/thiserror-impl-1.0.26.crate) = 13431
-SHA256 (rust/crates/tree-sitter-0.19.5.crate) = ad726ec26496bf4c083fff0f43d4eb3a2ad1bba305323af5ff91383c0b6ecac0
-SIZE (rust/crates/tree-sitter-0.19.5.crate) = 125205
-SHA256 (rust/crates/unicode-xid-0.2.2.crate) = 8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3
-SIZE (rust/crates/unicode-xid-0.2.2.crate) = 14955
-SHA256 (rust/crates/unindent-0.1.7.crate) = f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7
-SIZE (rust/crates/unindent-0.1.7.crate) = 8346
-SHA256 (rust/crates/wasi-0.10.2+wasi-snapshot-preview1.crate) = fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6
-SIZE (rust/crates/wasi-0.10.2+wasi-snapshot-preview1.crate) = 27505
-SHA256 (rust/crates/winapi-0.3.9.crate) = 5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419
-SIZE (rust/crates/winapi-0.3.9.crate) = 1200382
-SHA256 (rust/crates/winapi-i686-pc-windows-gnu-0.4.0.crate) = ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6
-SIZE (rust/crates/winapi-i686-pc-windows-gnu-0.4.0.crate) = 2918815
-SHA256 (rust/crates/winapi-x86_64-pc-windows-gnu-0.4.0.crate) = 712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f
-SIZE (rust/crates/winapi-x86_64-pc-windows-gnu-0.4.0.crate) = 2947998
-SHA256 (dbt-labs-tree-sitter-jinja2-v0.1.0_GH0.tar.gz) = 2513994450f81a0674676fa451d4eea9ea4e665e6f7c1e1f4577ff9466feb765
-SIZE (dbt-labs-tree-sitter-jinja2-v0.1.0_GH0.tar.gz) = 22280
+SHA256 (rust/crates/quote-1.0.31.crate) = 5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0
+SIZE (rust/crates/quote-1.0.31.crate) = 27881
+SHA256 (rust/crates/rand-0.8.5.crate) = 34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404
+SIZE (rust/crates/rand-0.8.5.crate) = 87113
+SHA256 (rust/crates/rand_core-0.6.4.crate) = ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c
+SIZE (rust/crates/rand_core-0.6.4.crate) = 22666
+SHA256 (rust/crates/rayon-1.7.0.crate) = 1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b
+SIZE (rust/crates/rayon-1.7.0.crate) = 169488
+SHA256 (rust/crates/rayon-core-1.11.0.crate) = 4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d
+SIZE (rust/crates/rayon-core-1.11.0.crate) = 73118
+SHA256 (rust/crates/redox_syscall-0.3.5.crate) = 567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29
+SIZE (rust/crates/redox_syscall-0.3.5.crate) = 23404
+SHA256 (rust/crates/regex-1.9.1.crate) = b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575
+SIZE (rust/crates/regex-1.9.1.crate) = 251978
+SHA256 (rust/crates/regex-automata-0.3.3.crate) = 39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310
+SIZE (rust/crates/regex-automata-0.3.3.crate) = 604301
+SHA256 (rust/crates/regex-syntax-0.7.4.crate) = e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2
+SIZE (rust/crates/regex-syntax-0.7.4.crate) = 343365
+SHA256 (rust/crates/scopeguard-1.2.0.crate) = 94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49
+SIZE (rust/crates/scopeguard-1.2.0.crate) = 11619
+SHA256 (rust/crates/smallvec-1.11.0.crate) = 62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9
+SIZE (rust/crates/smallvec-1.11.0.crate) = 34680
+SHA256 (rust/crates/syn-1.0.109.crate) = 72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237
+SIZE (rust/crates/syn-1.0.109.crate) = 237611
+SHA256 (rust/crates/syn-2.0.26.crate) = 45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970
+SIZE (rust/crates/syn-2.0.26.crate) = 241572
+SHA256 (rust/crates/target-lexicon-0.12.10.crate) = 1d2faeef5759ab89935255b1a4cd98e0baf99d1085e37d36599c625dac49ae8e
+SIZE (rust/crates/target-lexicon-0.12.10.crate) = 24544
+SHA256 (rust/crates/thiserror-1.0.43.crate) = a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42
+SIZE (rust/crates/thiserror-1.0.43.crate) = 18735
+SHA256 (rust/crates/thiserror-impl-1.0.43.crate) = 463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f
+SIZE (rust/crates/thiserror-impl-1.0.43.crate) = 15062
+SHA256 (rust/crates/tree-sitter-0.20.10.crate) = e747b1f9b7b931ed39a548c1fae149101497de3c1fc8d9e18c62c1a66c683d3d
+SIZE (rust/crates/tree-sitter-0.20.10.crate) = 134502
+SHA256 (rust/crates/unicode-ident-1.0.11.crate) = 301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c
+SIZE (rust/crates/unicode-ident-1.0.11.crate) = 42067
+SHA256 (rust/crates/unindent-0.1.11.crate) = e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c
+SIZE (rust/crates/unindent-0.1.11.crate) = 7700
+SHA256 (rust/crates/wasi-0.11.0+wasi-snapshot-preview1.crate) = 9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423
+SIZE (rust/crates/wasi-0.11.0+wasi-snapshot-preview1.crate) = 28131
+SHA256 (rust/crates/windows-targets-0.48.1.crate) = 05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f
+SIZE (rust/crates/windows-targets-0.48.1.crate) = 6902
+SHA256 (rust/crates/windows_aarch64_gnullvm-0.48.0.crate) = 91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc
+SIZE (rust/crates/windows_aarch64_gnullvm-0.48.0.crate) = 366543
+SHA256 (rust/crates/windows_aarch64_msvc-0.48.0.crate) = b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3
+SIZE (rust/crates/windows_aarch64_msvc-0.48.0.crate) = 671479
+SHA256 (rust/crates/windows_i686_gnu-0.48.0.crate) = 622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241
+SIZE (rust/crates/windows_i686_gnu-0.48.0.crate) = 741490
+SHA256 (rust/crates/windows_i686_msvc-0.48.0.crate) = 4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00
+SIZE (rust/crates/windows_i686_msvc-0.48.0.crate) = 730056
+SHA256 (rust/crates/windows_x86_64_gnu-0.48.0.crate) = ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1
+SIZE (rust/crates/windows_x86_64_gnu-0.48.0.crate) = 703595
+SHA256 (rust/crates/windows_x86_64_gnullvm-0.48.0.crate) = 7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953
+SIZE (rust/crates/windows_x86_64_gnullvm-0.48.0.crate) = 366536
+SHA256 (rust/crates/windows_x86_64_msvc-0.48.0.crate) = 1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a
+SIZE (rust/crates/windows_x86_64_msvc-0.48.0.crate) = 671422
+SHA256 (dbt-labs-tree-sitter-jinja2-v0.2.0_GH0.tar.gz) = 29ac60e54a40e4bc43d30b1efcc9901ad8cb1328a74f02b3236b63ba7ebbb1ba
+SIZE (dbt-labs-tree-sitter-jinja2-v0.2.0_GH0.tar.gz) = 24053
-- 
2.42.1

Added ports/maintain/submissions/py-dbt-duckdb/Makefile.









>
>
>
>
1
2
3
4
PORTS=		databases/py-dbt-duckdb
DEPENDS=	py-dbt-core py-duckdb

.include "../submit.mk"

Added ports/maintain/submissions/py-dbt-duckdb/gitmessage.













>
>
>
>
>
>
1
2
3
4
5
6
databases/py-dbt-duckdb: Update to 1.7.0

Changes: https://github.com/duckdb/dbt-duckdb/releases/tag/1.7.0

PR: 275593
Approved by: submitter is maintainer

Added ports/maintain/submissions/py-dbt-duckdb/py-dbt-duckdb.patch.

































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
From 0523dda7ac96dda2ccb42b3b09560fd8cdc6c140 Mon Sep 17 00:00:00 2001
From: Pat Maddox <pat@patmaddox.com>
Date: Thu, 7 Dec 2023 02:16:23 -0800
Subject: [PATCH] databases/py-dbt-duckdb: Update to 1.7.0

Changes: https://github.com/duckdb/dbt-duckdb/releases/tag/1.7.0

PR: 275593
Approved by: submitter is maintainer
---
 databases/py-dbt-duckdb/Makefile | 4 ++--
 databases/py-dbt-duckdb/distinfo | 6 +++---
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/databases/py-dbt-duckdb/Makefile b/databases/py-dbt-duckdb/Makefile
index 94027b4a77e9..17dc4ccf12fc 100644
--- a/databases/py-dbt-duckdb/Makefile
+++ b/databases/py-dbt-duckdb/Makefile
@@ -1,5 +1,5 @@
 PORTNAME=	dbt-duckdb
-PORTVERSION=	1.6.0
+PORTVERSION=	1.7.0
 CATEGORIES=	databases python
 MASTER_SITES=	PYPI
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
@@ -10,7 +10,7 @@ WWW=		https://github.com/jwills/dbt-duckdb
 
 LICENSE=	APACHE20
 
-RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}dbt-core>=1.6.0,<2:databases/py-dbt-core@${PY_FLAVOR} \
+RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}dbt-core>=1.7.0,<2:databases/py-dbt-core@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}duckdb>=0.7.0:databases/py-duckdb@${PY_FLAVOR}
 
 USES=		python
diff --git a/databases/py-dbt-duckdb/distinfo b/databases/py-dbt-duckdb/distinfo
index fdf6ecf16212..88d9ce3d1f74 100644
--- a/databases/py-dbt-duckdb/distinfo
+++ b/databases/py-dbt-duckdb/distinfo
@@ -1,3 +1,3 @@
-TIMESTAMP = 1691548339
-SHA256 (dbt-duckdb-1.6.0.tar.gz) = e36776d6ee7aa1554a236b7ffa93c556c4419920134a3801feef7c64b706db16
-SIZE (dbt-duckdb-1.6.0.tar.gz) = 45586
+TIMESTAMP = 1701384521
+SHA256 (dbt-duckdb-1.7.0.tar.gz) = e2479f5fdd5177eea37c081d8ea126d80ec51d95e69ea31132ade396538bf0a0
+SIZE (dbt-duckdb-1.7.0.tar.gz) = 49217
-- 
2.42.1

Added ports/maintain/submissions/py-dbt-snowflake/Makefile.









>
>
>
>
1
2
3
4
PORTS=		databases/py-dbt-snowflake
DEPENDS=	py-dbt-core

.include "../submit.mk"

Added ports/maintain/submissions/py-dbt-snowflake/gitmessage.



>
1
databases/py-dbt-snowflake: Update to 1.7.0

Added ports/maintain/submissions/py-dbt-snowflake/py-dbt-snowflake.patch.































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
From 2ffa1100db020475074698366bbfa82c5705fcbd Mon Sep 17 00:00:00 2001
From: Pat Maddox <pat@patmaddox.com>
Date: Wed, 6 Dec 2023 19:15:27 -0800
Subject: [PATCH] databases/py-dbt-snowflake: Update to 1.7.0

---
 databases/py-dbt-snowflake/Makefile | 6 +++---
 databases/py-dbt-snowflake/distinfo | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/databases/py-dbt-snowflake/Makefile b/databases/py-dbt-snowflake/Makefile
index d09504defe8c..9c5e5dd24d9c 100644
--- a/databases/py-dbt-snowflake/Makefile
+++ b/databases/py-dbt-snowflake/Makefile
@@ -1,5 +1,5 @@
 PORTNAME=	dbt-snowflake
-PORTVERSION=	1.6.0
+PORTVERSION=	1.7.0
 CATEGORIES=	databases python
 MASTER_SITES=	PYPI
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
@@ -11,9 +11,9 @@ WWW=		https://github.com/dbt-labs/dbt-snowflake
 LICENSE=	APACHE20
 
 RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}agate>0:textproc/py-agate@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}dbt-core>=1.6,<2:databases/py-dbt-core@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}dbt-core>=1.7.3,<2:databases/py-dbt-core@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}snowflake-connector-python>=3.0,<4:databases/py-snowflake-connector-python@${PY_FLAVOR} \
-	        ${PYTHON_PKGNAMEPREFIX}keyring>0:security/py-keyring@${PY_FLAVOR}
+	        ${PYTHON_PKGNAMEPREFIX}keyring>=16.1.1:security/py-keyring@${PY_FLAVOR}
 
 USES=		python
 USE_PYTHON=	autoplist concurrent distutils
diff --git a/databases/py-dbt-snowflake/distinfo b/databases/py-dbt-snowflake/distinfo
index 76bf8dc3209c..68f6af68fb1d 100644
--- a/databases/py-dbt-snowflake/distinfo
+++ b/databases/py-dbt-snowflake/distinfo
@@ -1,3 +1,3 @@
-TIMESTAMP = 1691563233
-SHA256 (dbt-snowflake-1.6.0.tar.gz) = 1e710aeafceaa57318dce58789595c813cc1c551f43a0e1ec03d34c6659f504c
-SIZE (dbt-snowflake-1.6.0.tar.gz) = 31680
+TIMESTAMP = 1701384077
+SHA256 (dbt-snowflake-1.7.0.tar.gz) = 959943cdb0bcedc67203f0205a9ae21fba13f4c95ddf7089848a77b8c8a8d5f1
+SIZE (dbt-snowflake-1.7.0.tar.gz) = 33622
-- 
2.42.1

Added ports/maintain/submissions/py-duckdb/Makefile.









>
>
>
>
1
2
3
4
PORTS=		databases/py-duckdb
#TESTPORTS=	databases/py-dbt-duckdb

.include "../submit.mk"

Added ports/maintain/submissions/py-duckdb/gitmessage.







>
>
>
1
2
3
databases/py-duckdb: Fix broken build (0.9.2)

PR: 275464

Added ports/maintain/submissions/py-duckdb/py-duckdb.patch.



















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
From a41f7217c0f8b90cacd2827fa3f717fad25f827a Mon Sep 17 00:00:00 2001
From: Pat Maddox <pat@patmaddox.com>
Date: Thu, 7 Dec 2023 02:11:17 -0800
Subject: [PATCH] databases/py-duckdb: Fix broken build (0.9.2)

PR: 275464
---
 databases/py-duckdb/Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/databases/py-duckdb/Makefile b/databases/py-duckdb/Makefile
index a770f50563fc..cb5b88a99eb5 100644
--- a/databases/py-duckdb/Makefile
+++ b/databases/py-duckdb/Makefile
@@ -25,6 +25,6 @@ TEST_WRKSRC=	${WRKDIR} # tests fail when run from the build directory due to a n
 TEST_ENV=	${MAKE_ENV} PYTHONPATH=${STAGEDIR}${PYTHONPREFIX_SITELIBDIR} # 2 tests fail, see https://github.com/duckdb/duckdb/issues/5642
 
 post-install:
-	@${STRIP_CMD} ${STAGEDIR}${PYTHON_SITELIBDIR}/duckdb.cpython-${PYTHON_SUFFIX}.so
+	@${STRIP_CMD} ${STAGEDIR}${PYTHON_SITELIBDIR}/duckdb/duckdb.cpython-${PYTHON_SUFFIX}.so
 
 .include <bsd.port.mk>
-- 
2.42.1

Added ports/maintain/submissions/py-mashumaro/Makefile.









>
>
>
>
1
2
3
4
PORTS=		devel/py-mashumaro
TESTPORTS=	databases/py-dbt-core

.include "../submit.mk"

Added ports/maintain/submissions/py-mashumaro/gitmessage.



>
1
devel/py-mashumaro: Update to 3.11

Added ports/maintain/submissions/py-mashumaro/py-mashumaro.patch.







































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
From a3bcef876b0ebda5f796bc8cb69970086acba847 Mon Sep 17 00:00:00 2001
From: Pat Maddox <pat@patmaddox.com>
Date: Wed, 6 Dec 2023 21:54:27 -0800
Subject: [PATCH] devel/py-mashumaro: Update to 3.11

---
 devel/py-mashumaro/Makefile | 2 +-
 devel/py-mashumaro/distinfo | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/devel/py-mashumaro/Makefile b/devel/py-mashumaro/Makefile
index cab97f35a499..7931d7e14c4e 100644
--- a/devel/py-mashumaro/Makefile
+++ b/devel/py-mashumaro/Makefile
@@ -1,5 +1,5 @@
 PORTNAME=	mashumaro
-PORTVERSION=	3.9
+PORTVERSION=	3.11
 CATEGORIES=	devel python
 MASTER_SITES=	PYPI
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
diff --git a/devel/py-mashumaro/distinfo b/devel/py-mashumaro/distinfo
index b48aaa96878c..1462c4f898fe 100644
--- a/devel/py-mashumaro/distinfo
+++ b/devel/py-mashumaro/distinfo
@@ -1,3 +1,3 @@
-TIMESTAMP = 1691525565
-SHA256 (mashumaro-3.9.tar.gz) = c179f3f29f7b88acc9472427ce9fc673072a04b3888ce4bd1cac94c266c8e587
-SIZE (mashumaro-3.9.tar.gz) = 106417
+TIMESTAMP = 1701245160
+SHA256 (mashumaro-3.11.tar.gz) = b0b2443be4bdad29bb209d91fe4a2a918fbd7b63cccfeb457c7eeb567db02f5e
+SIZE (mashumaro-3.11.tar.gz) = 123743
-- 
2.42.1

Added ports/maintain/submissions/py-snowflake-connector-python/Makefile.









>
>
>
>
1
2
3
4
PORTS=		databases/py-snowflake-connector-python
TESTPORTS=	databases/py-dbt-snowflake databases/py-snowddl databases/py-schemachange

.include "../submit.mk"

Added ports/maintain/submissions/py-snowflake-connector-python/gitmessage.



>
1
databases/py-snowflake-connector-python: Update to 3.5.0

Added ports/maintain/submissions/py-snowflake-connector-python/py-snowflake-connector-python.patch.







































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
From bdc9f69b99c3c0425ad3569679dde61f8ea8e9d6 Mon Sep 17 00:00:00 2001
From: Pat Maddox <pat@patmaddox.com>
Date: Wed, 6 Dec 2023 21:05:18 -0800
Subject: [PATCH] databases/py-snowflake-connector-python: Update to 3.5.0

---
 .../py-snowflake-connector-python/Makefile    |  3 +-
 .../py-snowflake-connector-python/distinfo    |  6 ++--
 ...oarrow__cpp_ArrowIterator_nanoarrow__ipc.c | 29 +++++++++++++++++++
 ...oarrow__cpp_ArrowIterator_nanoarrow__ipc.h | 11 +++++++
 4 files changed, 44 insertions(+), 5 deletions(-)
 create mode 100644 databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.c
 create mode 100644 databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.h

diff --git a/databases/py-snowflake-connector-python/Makefile b/databases/py-snowflake-connector-python/Makefile
index 5a0cee0ff8a9..c79e4370b369 100644
--- a/databases/py-snowflake-connector-python/Makefile
+++ b/databases/py-snowflake-connector-python/Makefile
@@ -1,5 +1,5 @@
 PORTNAME=	snowflake-connector-python
-PORTVERSION=	3.2.1
+PORTVERSION=	3.5.0
 CATEGORIES=	databases python
 MASTER_SITES=	PYPI
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
@@ -23,7 +23,6 @@ PY_DEPENDS=	${PYTHON_PKGNAMEPREFIX}asn1crypto>0.24.0,<2.0.0:devel/py-asn1crypto@
 		${PYTHON_PKGNAMEPREFIX}cryptography>=3.1.0:security/py-cryptography@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}oscrypto<2.0:security/py-oscrypto@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}openssl>=16.2.0:security/py-openssl@${PY_FLAVOR} \
-		${PYTHON_PKGNAMEPREFIX}pycryptodomex>=3.5.1<4.0:security/py-pycryptodomex@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}pyjwt<3.0:www/py-pyjwt@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}pytz>0:devel/py-pytz@${PY_FLAVOR} \
 		${PYTHON_PKGNAMEPREFIX}requests<3.0:www/py-requests@${PY_FLAVOR} \
diff --git a/databases/py-snowflake-connector-python/distinfo b/databases/py-snowflake-connector-python/distinfo
index ed242d01ca82..7ef6b3e3b074 100644
--- a/databases/py-snowflake-connector-python/distinfo
+++ b/databases/py-snowflake-connector-python/distinfo
@@ -1,3 +1,3 @@
-TIMESTAMP = 1697518750
-SHA256 (snowflake-connector-python-3.2.1.tar.gz) = 2f92112964e4d36c67dbcf900f0b6c4b56a4ab0b3cf44a0d166d290e867a9d8b
-SIZE (snowflake-connector-python-3.2.1.tar.gz) = 419953
+TIMESTAMP = 1701367506
+SHA256 (snowflake-connector-python-3.5.0.tar.gz) = 654e4a1f68a491544bd8f7c5ab02eb8531df67c5f4309d5253bd204044f8a1b3
+SIZE (snowflake-connector-python-3.5.0.tar.gz) = 702274
diff --git a/databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.c b/databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.c
new file mode 100644
index 000000000000..46bb0a440466
--- /dev/null
+++ b/databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.c
@@ -0,0 +1,29 @@
+--- src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c.orig	2023-11-30 19:17:00 UTC
++++ src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c
+@@ -21239,7 +21239,7 @@ static inline int ArrowIpcDecoderReadHeaderPrefix(stru
+ 
+   if (header_body_size_bytes == 0) {
+     ArrowErrorSet(error, "End of Arrow stream");
+-    return ENODATA;
++    return ENOATTR;
+   }
+ 
+   return NANOARROW_OK;
+@@ -22194,7 +22194,7 @@ static int ArrowIpcArrayStreamReaderNextHeader(
+     // is one of the valid outcomes) but we set the error anyway in case it gets
+     // propagated higher (e.g., if the stream is empty and there's no schema message)
+     ArrowErrorSet(&private_data->error, "No data available on stream");
+-    return ENODATA;
++    return ENOATTR;
+   } else if (bytes_read != 8) {
+     ArrowErrorSet(&private_data->error,
+                   "Expected at least 8 bytes in remainder of stream");
+@@ -22338,7 +22338,7 @@ static int ArrowIpcArrayStreamReaderGetNext(struct Arr
+   // Read + decode the next header
+   int result = ArrowIpcArrayStreamReaderNextHeader(
+       private_data, NANOARROW_IPC_MESSAGE_TYPE_RECORD_BATCH);
+-  if (result == ENODATA) {
++  if (result == ENOATTR) {
+     // Stream is finished either because there is no input or because
+     // end of stream bytes were read.
+     out->release = NULL;
diff --git a/databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.h b/databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.h
new file mode 100644
index 000000000000..cf93c5d9c978
--- /dev/null
+++ b/databases/py-snowflake-connector-python/files/patch-src_snowflake_connector_nanoarrow__cpp_ArrowIterator_nanoarrow__ipc.h
@@ -0,0 +1,11 @@
+--- src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.h.orig	2023-11-30 19:16:47 UTC
++++ src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.h
+@@ -193,7 +193,7 @@ void ArrowIpcDecoderReset(struct ArrowIpcDecoder* deco
+ /// The first 8 bytes of an Arrow IPC message are 0xFFFFFF followed by the size
+ /// of the header as a little-endian 32-bit integer. ArrowIpcDecoderPeekHeader() reads
+ /// these bytes and returns ESPIPE if there are not enough remaining bytes in data to read
+-/// the entire header message, EINVAL if the first 8 bytes are not valid, ENODATA if the
++/// the entire header message, EINVAL if the first 8 bytes are not valid, ENOATTR if the
+ /// Arrow end-of-stream indicator has been reached, or NANOARROW_OK otherwise.
+ ArrowErrorCode ArrowIpcDecoderPeekHeader(struct ArrowIpcDecoder* decoder,
+                                          struct ArrowBufferView data,
-- 
2.42.1

Added ports/maintain/submissions/py-tree-sitter/Makefile.







>
>
>
1
2
3
PORTS= devel/py-tree-sitter

.include "../submit.mk"

Added ports/maintain/submissions/py-tree-sitter/gitmessage.









>
>
>
>
1
2
3
4
devel/py-tree-sitter: Update to 0.20.4

PR: 275588
Approved by: submitter is maintainer

Added ports/maintain/submissions/py-tree-sitter/py-tree-sitter.patch.











































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
From 790c5d6d0a46bfdf38f9a4f6e225750ecebd7b55 Mon Sep 17 00:00:00 2001
From: Pat Maddox <pat@patmaddox.com>
Date: Wed, 6 Dec 2023 23:22:55 -0800
Subject: [PATCH] devel/py-tree-sitter: Update to 0.20.4

PR: 275588
Approved by: submitter is maintainer
---
 devel/py-tree-sitter/Makefile | 2 +-
 devel/py-tree-sitter/distinfo | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/devel/py-tree-sitter/Makefile b/devel/py-tree-sitter/Makefile
index c5378357b67f..5bd743f59eb4 100644
--- a/devel/py-tree-sitter/Makefile
+++ b/devel/py-tree-sitter/Makefile
@@ -1,5 +1,5 @@
 PORTNAME=	tree-sitter
-PORTVERSION=	0.20.1
+PORTVERSION=	0.20.4
 CATEGORIES=	devel python
 MASTER_SITES=	PYPI
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
diff --git a/devel/py-tree-sitter/distinfo b/devel/py-tree-sitter/distinfo
index 13f495383f29..126b381397ff 100644
--- a/devel/py-tree-sitter/distinfo
+++ b/devel/py-tree-sitter/distinfo
@@ -1,3 +1,3 @@
-TIMESTAMP = 1691531462
-SHA256 (tree_sitter-0.20.1.tar.gz) = e93f082c545d6649bcfb5d681ed255eb004a6ce22988971a128f40692feec60d
-SIZE (tree_sitter-0.20.1.tar.gz) = 126198
+TIMESTAMP = 1701238318
+SHA256 (tree_sitter-0.20.4.tar.gz) = 6adb123e2f3e56399bbf2359924633c882cc40ee8344885200bca0922f713be5
+SIZE (tree_sitter-0.20.4.tar.gz) = 140726
-- 
2.42.1

Added ports/maintain/submissions/submit.mk.













































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
NAME=		${.CURDIR:T}
PATCHFILE=	${NAME}.patch
ROOTDIR!=	realpath ../..
GITDIR=		${ROOTDIR}/freebsd-ports.git
GITMAIN=	${GITDIR}/main
#WORKTREE=	${GITDIR}/${NAME}
#POUDRIERE=	submit_${NAME:S/-/_/g}
WORKTREE=	${GITDIR}/submit
POUDRIERE=	submit
ALLPORTS!=	find ${ROOTDIR}/ports -type d -depth 2 | sed 's|${ROOTDIR}/ports/||' | sort

.PHONY: build patch patchfile poudriere worktree

${PATCHFILE}: gitmessage ${GITMAIN} ${WORKTREE} poudriere patch build patchfile
	@echo "done"

${GITMAIN}:
	git clone https://github.com/freebsd/freebsd-ports.git ${GITMAIN}

worktree:
${WORKTREE}:
	git -C ${GITMAIN} worktree add --detach ${WORKTREE} main

poudriere:
	if ! poudriere ports -l | grep '^${POUDRIERE}[[:space:]]' > /dev/null; then doas poudriere ports -c -p ${POUDRIERE} -m null -M ${WORKTREE}; fi

patch:
	git -C ${WORKTREE} reset --hard main
	git -C ${WORKTREE} clean -f
.ifdef DEPENDS
	for d in ${DEPENDS}; do git -C ${WORKTREE} am ${ROOTDIR}/submissions/$$d/$$d.patch; done
.endif
	for p in ${PORTS}; do if [ -d ${WORKTREE}/$$p ]; then rm -r ${WORKTREE}/$$p; fi; cp -Rp ${ROOTDIR}/ports/$$p ${WORKTREE}/$$(basename $$(dirname $$p)); done
	for p in ${PORTS}; do if ! grep -q $$(basename $$p) ${WORKTREE}/$$(dirname $$p)/Makefile; then awk -v new="    SUBDIR += $$(basename $$p)" '!done && $$0 > new { print new RS $$0; done=1; next } 1' ${WORKTREE}/$$(dirname $$p)/Makefile > Makefile.new && mv Makefile.new ${WORKTREE}/$$(dirname $$p)/Makefile; fi; done

build:
	doas poudriere pkgclean -y -j 132amd64 -p ${POUDRIERE} -C ${ALLPORTS}
	doas poudriere bulk -t -j 132amd64 -p ${POUDRIERE} -b latest -C ${PFLAGS} ${PORTS} ${TESTPORTS}

patchfile:
	if [ -f ${PATCHFILE} ]; then mv ${PATCHFILE} ${PATCHFILE}.orig; fi
	git -C ${WORKTREE} add .
	PORTSDIR=${WORKTREE} git -C ${WORKTREE} commit -F ${.CURDIR}/gitmessage
	git -C ${WORKTREE} format-patch -o ${.CURDIR} HEAD^
	if [ $$(ls *.patch | wc -l) -ne 1 ]; then echo "error: more than one .patch file present"; fi
	mv *.patch ${PATCHFILE}
	if [ -f ${PATCHFILE}.orig ]; then if diff -q --ignore-matching-lines '^From' --ignore-matching-lines '^Date:' ${PATCHFILE} ${PATCHFILE}.orig > /dev/null; then mv ${PATCHFILE}.orig ${PATCHFILE}; fi; fi
	rm -f ${PATCHFILE}.orig

debug:
	@echo "NAME: ${NAME}"
	@echo ".CURDIR: ${.CURDIR}"
	@echo "WORKTREE: ${WORKTREE}"
	@echo "POUDRIERE: ${POUDRIERE}"

Added ports/merge-branches.







>
>
>
1
2
3
main
lang-elixir-mode.el
ports-mgmt-poudriere-devel

Added ports/patmaddox/elixir-bundled-app/Makefile.























































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
# Example port that includes an elixir app's source code.
# This is not a good example of how to package and distribute an Elixir app,
# it's just for testing purposes to get familiar with aspects of the ports framework.

PORTNAME= elixir-bundled-app
DISTVERSION= g202301130937 # I forget what this is - basically a git timestamp?
DISTFILES= # not going to download anything, since the source code is bundled
MAINTAINER= pat@patmaddox.com
COMMENT= A simple port of an elixir application, that brings its own files.
WWW= https://github.com/patmaddox/home/blob/main/ports/patmaddox/elixir-bundled-app
CATEGORIES= patmaddox

ELIXIR_APP_NAME= example_bundled_app
ERLANG_VER?= 24
BUILD_DEPENDS= erlang-runtime${ERLANG_VER}>0:lang/erlang-runtime${ERLANG_VER} elixir-devel>0:lang/elixir-devel
MAKE_ENV+= PATH="${LOCALBASE}/lib/erlang${ERLANG_VER}/bin:${PATH}"
USE_LOCALE= en_US.UTF-8
MIX_ENV?= prod
.export MIX_ENV

# used to substitute the beam SMP version in generated files (e.g. rc script)
#SUB_LIST= ERTS_BEAM_SMP=${ERTS_BEAM_SMP:sh}
#ERTS_BEAM_SMP= cd ${LOCALBASE}/lib/erlang${ERLANG_VER} && find erts-*/bin/beam.smp

PLIST= ${WRKDIR}/PLIST # do-build will dynamically generate the PLIST after `mix release`
RELDIR= ${WRKSRC}/_build/${MIX_ENV}/rel/${ELIXIR_APP_NAME}

do-extract:
	mkdir ${WRKSRC}
	cp -Rp src/${ELIXIR_APP_NAME}/ ${WRKSRC}/

# poudriere needs ALLOW_NETWORKING_PACKAGES="ex_ample"
# to permit hex / rebar / deps network requests
do-build:
	cd ${WRKSRC} && ${MAKE_ENV} mix local.hex --force && ${MAKE_ENV} mix local.rebar --force && ${MAKE_ENV} mix deps.get && ${MAKE_ENV} mix release
	cd ${RELDIR} && find . -name '*.bat' -delete
#	cd ${RELDIR} && find . -name COOKIE -delete
	cd ${RELDIR} && find -s . -type f | sed -e 's/^\./libexec\/${ELIXIR_APP_NAME}/' | tee ${PLIST}

do-install:
	cp -Rp ${RELDIR} ${STAGEDIR}${PREFIX}/libexec/

.include <bsd.port.mk>

Added ports/patmaddox/elixir-bundled-app/pkg-descr.



>
1
This is a simple elixir app. The source code is bundled alongside the port.

Added ports/patmaddox/elixir-bundled-app/src/example_bundled_app/.formatter.exs.









>
>
>
>
1
2
3
4
# Used by "mix format"
[
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]

Added ports/patmaddox/elixir-bundled-app/src/example_bundled_app/.gitignore.





















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
# The directory Mix will write compiled artifacts to.
/_build/

# If you run "mix test --cover", coverage assets end up here.
/cover/

# The directory Mix downloads your dependencies sources to.
/deps/

# Where third-party dependencies like ExDoc output generated docs.
/doc/

# Ignore .fetch files in case you like to edit your project deps locally.
/.fetch

# If the VM crashes, it generates a dump, let's ignore it too.
erl_crash.dump

# Also ignore archive artifacts (built via "mix archive.build").
*.ez

# Ignore package tarball (built via "mix hex.build").
example_bundled_app-*.tar

# Temporary files, for example, from tests.
/tmp/

Added ports/patmaddox/elixir-bundled-app/src/example_bundled_app/README.md.











































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
# ExampleBundledApp

**TODO: Add description**

## Installation

If [available in Hex](https://hex.pm/docs/publish), the package can be installed
by adding `example_bundled_app` to your list of dependencies in `mix.exs`:

```elixir
def deps do
  [
    {:example_bundled_app, "~> 0.1.0"}
  ]
end
```

Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at <https://hexdocs.pm/example_bundled_app>.

Added ports/patmaddox/elixir-bundled-app/src/example_bundled_app/lib/example_bundled_app.ex.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
defmodule ExampleBundledApp do
  @moduledoc """
  Documentation for `ExampleBundledApp`.
  """

  @doc """
  Hello world.

  ## Examples

      iex> ExampleBundledApp.hello()
      :world

  """
  def hello do
    :world
  end
end

Added ports/patmaddox/elixir-bundled-app/src/example_bundled_app/lib/example_bundled_app/application.ex.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
defmodule ExampleBundledApp.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    children = [
      # Starts a worker by calling: ExampleBundledApp.Worker.start_link(arg)
      # {ExampleBundledApp.Worker, arg}
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: ExampleBundledApp.Supervisor]
    Supervisor.start_link(children, opts)
  end
end

Added ports/patmaddox/elixir-bundled-app/src/example_bundled_app/mix.exs.



























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
defmodule ExampleBundledApp.MixProject do
  use Mix.Project

  def project do
    [
      app: :example_bundled_app,
      version: "0.1.0",
      elixir: "~> 1.14",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger],
      mod: {ExampleBundledApp.Application, []}
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # {:dep_from_hexpm, "~> 0.3.0"},
      # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
    ]
  end
end

Added ports/patmaddox/elixir-bundled-app/src/example_bundled_app/test/example_bundled_app_test.exs.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
defmodule ExampleBundledAppTest do
  use ExUnit.Case
  doctest ExampleBundledApp

  test "greets the world" do
    assert ExampleBundledApp.hello() == :world
  end
end

Added ports/patmaddox/elixir-bundled-app/src/example_bundled_app/test/test_helper.exs.



>
1
ExUnit.start()

Added ports/prod.

































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
benchmarks/bonnie++
databases/duckdb
databases/postgresql13-server
databases/postgresql14-server
databases/py-apache-arrow
databases/py-dbt-duckdb
databases/py-dbt-snowflake
databases/tcl-sqlite3
deskutils/i3lock
deskutils/just
devel/atf
devel/cmake-core
devel/epl.el
devel/fossil
devel/git
devel/gmake
devel/hs-git-annex
devel/magit
devel/nimble
devel/pkg-info.el
devel/py-awscli
devel/py-pip
devel/py-pipenv
devel/rubygem-irb
devel/tcllib
devel/uclcmd
editors/emacs
editors/emacs@nox
graphics/drm-kmod
irc/hexchat
lang/elixir
lang/elixir-mode.el
lang/go
lang/go121
lang/ldc
lang/nim
lang/python
lang/ruby31
lang/rust
lang/tcl-wrapper
lang/v
lang/zig
math/py-pandas
misc/freebsd-release-manifests
multimedia/webcamd
net-mgmt/alertmanager
net-mgmt/prometheus2
net/google-cloud-sdk
net/haproxy
net/rsync
ports-mgmt/modules2tuple
ports-mgmt/pkg
ports-mgmt/portlint
ports-mgmt/poudriere-devel
security/1password-client2
security/acme.sh
security/doas
security/gnupg
security/nmap
security/sudo
security/tailscale
sysutils/consul
sysutils/direnv
sysutils/fx
sysutils/htop
sysutils/mcelog
sysutils/memtester
sysutils/node_exporter
sysutils/password-store
sysutils/pstree
sysutils/sanoid
sysutils/terraform
sysutils/tmux
textproc/docproj
textproc/hs-pandoc
textproc/jq
textproc/markdown-mode.el
textproc/p5-ack
textproc/ripgrep
textproc/yaml-mode.el
www/deno
www/firefox-esr
www/nginx
www/thttpd
www/tinyproxy
x11-fonts/noto-emoji
x11-wm/i3
x11-wm/xfce4
x11/nvidia-driver
x11/nvidia-settings
x11/nvidia-xconfig
x11/xbacklight
x11/xfce4-screensaver
x11/xfce4-screenshooter-plugin
x11/xfce4-terminal
x11/xorg

Added ports/tree/net/google-cloud-sdk/Makefile.





















































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
PORTNAME=	google-cloud-sdk
PORTVERSION=	331.0.0
CATEGORIES=	net
MASTER_SITES=	https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/
DISTNAME=	google-cloud-sdk-${PORTVERSION}-linux-x86_64

MAINTAINER=	bofh@FreeBSD.org
COMMENT=	Google Cloud SDK for Google Cloud Platform
WWW=		https://developers.google.com/cloud/sdk/

LICENSE=	APACHE20
LICENSE_FILE=	${WRKSRC}/LICENSE

RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}sqlite3>0:databases/py-sqlite3@${PY_FLAVOR}

USES=		python:3.7+

WRKSRC=		${WRKDIR}/google-cloud-sdk
NO_ARCH=	yes
NO_BUILD=	yes

PLIST_FILES=	bin/bq \
		bin/docker-credential-gcloud \
		bin/gcloud \
		bin/git-credential-gcloud.sh \
		bin/gsutil

OPTIONS_DEFINE=		BASH ZSH
OPTIONS_DEFAULT=	BASH ZSH

BASH_PLIST_FILES=	share/bash-completion/completions/gcloud
ZSH_PLIST_FILES=	share/zsh/site-functions/_gcloud

post-extract:
	@${RM} -r \
		${WRKSRC}/bin/anthoscli \
		${WRKSRC}/bin/kuberun \
		${WRKSRC}/bin/gcloud-crc32c \
		${WRKSRC}/install.bat \
		${WRKSRC}/platform/bundledpythonunix \
		${WRKSRC}/platform/gsutil/third_party/crcmod/docs/source/make.bat \
		${WRKSRC}/platform/gsutil/third_party/rsa/doc/make.bat \
		${WRKSRC}/rpm \
		${WRKSRC}/deb \
		${WRKSRC}/lib/third_party/grpc/_cython/cygrpc.so

post-patch:
	@${REINPLACE_CMD} -e 's|python2.7|PYTHON_CMD}|g' ${WRKSRC}/bin/bq ${WRKSRC}/bin/gsutil ${WRKSRC}/bin/docker-credential-gcloud ${WRKSRC}/bin/java_dev_appserver.sh ${WRKSRC}/bin/git-credential-gcloud.sh ${WRKSRC}/bin/gcloud
	@${RM} ${WRKSRC}/bin/*.bak ${WRKSRC}/bin/*.orig

do-install:
	${RM} -r ${WRKSRC}/help ${WRKSRC}/.install/.download
	( cd ${WRKSRC} ; ${COPYTREE_SHARE} . ${STAGEDIR}${PREFIX}/google-cloud-sdk )
.for x in bq docker-credential-gcloud gcloud git-credential-gcloud.sh gsutil
	${CHMOD} +x ${STAGEDIR}${PREFIX}/google-cloud-sdk/bin/${x}
	${RLN} ${STAGEDIR}${PREFIX}/google-cloud-sdk/bin/${x} \
		${STAGEDIR}${PREFIX}/bin/${x}
.endfor

post-install:
	@(cd ${STAGEDIR}${PREFIX} && \
		${FIND} -s google-cloud-sdk -type f -o -type l >> ${TMPPLIST})

post-install-BASH-on:
	@${MKDIR} ${STAGEDIR}${PREFIX}/share/bash-completion/completions
	${RLN} ${STAGEDIR}${PREFIX}/google-cloud-sdk/completion.bash.inc \
		${STAGEDIR}${PREFIX}/share/bash-completion/completions/gcloud

post-install-ZSH-on:
	@${MKDIR} ${STAGEDIR}${PREFIX}/share/zsh/site-functions
	${RLN} ${STAGEDIR}${PREFIX}/google-cloud-sdk/completion.zsh.inc \
		${STAGEDIR}${PREFIX}/share/zsh/site-functions/_gcloud

.include <bsd.port.mk>

Added ports/tree/net/google-cloud-sdk/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1685561747
SHA256 (google-cloud-sdk-331.0.0-linux-x86_64.tar.gz) = f90c2df5bd0b3498d7e33112f17439eead8c94ae7d60a1cab0091de0eee62c16
SIZE (google-cloud-sdk-331.0.0-linux-x86_64.tar.gz) = 112634068

Added ports/tree/net/google-cloud-sdk/pkg-descr.







>
>
>
1
2
3
Google Cloud SDK contains tools and libraries that enable you to easily create
and manage resources on Google Cloud Platform, including App Engine, Compute
Engine, Cloud Storage, BigQuery, Cloud SQL, and Cloud DNS.

Added ports/tree/sysutils/jectl/Makefile.

































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
PORTNAME= jectl
PORTVERSION=0.0.1
CATEGORIES= sysutils

MAKE_ARGS= -m /usr/src/share/mk

USE_GITHUB= yes
GH_ACCOUNT= KlaraSystems
GH_TAGNAME= 68fa5ef9ab5e221f378a88f92c0b317ddd10d315

PLIST_FILES= sbin/jectl

do-install:
	${INSTALL_PROGRAM} ${WRKSRC}/${PORTNAME} ${STAGEDIR}${PREFIX}/sbin

.include <bsd.port.mk>

Added ports/tree/sysutils/jectl/distinfo.







>
>
>
1
2
3
TIMESTAMP = 1673403705
SHA256 (KlaraSystems-jectl-0.0.1-68fa5ef9ab5e221f378a88f92c0b317ddd10d315_GH0.tar.gz) = f4ec58cf78fce43bd71a6476eda371e4bec62b9dc97face440d6ca7259b2678b
SIZE (KlaraSystems-jectl-0.0.1-68fa5ef9ab5e221f378a88f92c0b317ddd10d315_GH0.tar.gz) = 12048

Added ports/tree/sysutils/jectl/pkg-descr.



>
1
jail environment control

Added priv/.gitkeep.

Added scraps/advent2023/01/Kyuafile.











>
>
>
>
>
1
2
3
4
5
syntax(2)

test_suite('01')

atf_test_program{name='test.sh'}

Added scraps/advent2023/01/aoc-input.

















































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
8eight1
98126
fourfourthreehnbhkmscqxdfksg64bvpppznkh
8fivenvvtrlj
six3zbhvrfhsevennine
427nine6chnqrssxfour
threevvxhvx38rktdqm3
eightzgqzr3eight
xgjjmnlvznf2nineltmsevennine6
ninethree15seven
37cjnsfbfkqpkxpdvgk8
sgeightwo3
9sbxg
1spnthree59ninejjgjdlx
six9six2gxmn
twothreefdbl6five3zcqvcqxkcvdfkl4
46six47seven27one
four15
7sixvmsrrzqnngonethree
3fvsghvkqkbfivenine
zhthfghk6ppc48bdx6two
1fouronesixsvhbglmvxx
7lcsixmzmseven
two37sixccrpqngpvthree
86fivelqmlmbnhq
bvcz4
6fivendr7seven31nine4
jmkhtgklhpxxblc2slhlnfkoneonefour
8smpnsc29threesix
r795vgnhqhs92one
eightfive6
4threeqzslpcvkpfdjh
8cthp65
twoonenine87
dzttdmpfxtnine6onefourone3vnnz
8tnnfrsdffpthree67xh
seven5seven27hf
3sevenone
seven9foursskfhnchhf481
vrfbrcsgp81vsztr
six9dnqqgxtvrngpz7879two
lkvzzdxmr71lglsixfour
two6jkpz9
1sevenpmjfv
nlvrdmdhxsix7three7frmdlckfjninesix
gfb89qrhtzsfvbnpbp6two
f8mbbtzjvrqkrszlgrlzrlrgmbmmrdqv8seven
sphkkpptpvjzbflkzz2vbndc3foureighttbn
eightnine5zpbzhltpmsxrggdcmbrzkvczqbfsevendtjsr
2snpbrhpnine3sevensevenhftpqgk5
seventjxr91sevenseveneightwojf
1hfkvhjxrtjfivefive
3fxgmlxtwo
stwoeighttwo3
eight45t
sixthreecdhbfive3sevensrskjm
fiveklmj94sixlhpfztlk95
fivefive6
fiventjm7tndhd57
teightwo87nine83
6jskjmxrttwo93four7
dftzgsdc19threesevennine3twonevl
zmpcgkonethree1cspnkkpjr9
hmsfrjtxpgn9dqkr9dqvrtq4
3eightmfggjleight
qfglzvvrrtpfl4
7eightonefpqq3
tgtbsmpftwo4hvnvsixfrdqbvqbhspcczfhvqcslvfszvrtzbks
twofivesix34nine
dhlhkjcjsbgnxd6sgrdnzchtxtplltbqlk
oneonegcndlfourdngmbgdeightsix2
twotj9l8onetwoned
jpz2fourjgrjzf44
9fivesixmlqr8rxqpvpfzfive2nine
6xkfkrfnine196
four7gppqtzvbftzcbzt8nqq1nineqdlrff
2threekxgcqjzt4qdbthmjx
three61eightrnkckq
9knfvqgmdsmdqlkfzl
mxsqv6twojgmhpmlfq
7fouronelngndbsmljbfzmsix
fjm8fdpnlldlgvkqdcdcdjpxzdt
7five2
tsp9seven
eight2hzpkpqrjgcrnxksix
jfourcgjbpzbgdn61dzqbgthree
7threefive
1ncmzhqghthree3sixllvknhf
one5six9
krzkzmbccr1
74mqnzmqnf6
four9pfhjktmvxtwo
6bl21bpx2
4threefdpfqck
jjksjbsjlgdonefour5
2hbthreegfh1
threegzbn1four4hbqkmtmoneightrhg
fiveeightnine83
3five49jvrhvmnjmmzlzmhpbpsfpn4
3vvhshsixone3
55three7svtlzzggkqjhjx
fxdmnjsvpjbg69gcknlzgrqhqdsxvk3p
eightfcnmbzl2jhlklgmlmsevenkjbgzhstwo
6sevengdmtlfjmns63
ktgqthreedfmrpdvnkfvs3jsgcvtdkh
4seven1
34onexkninex6pnldxrfs
threenine3five9eightrvg9
xqbnmtrlsjninenine57rpcsjpqfdcbd6
216two
xhrdmfcvjzt4sixfkjsvkfhtldfqckj
2six8zeight1
cmpklgjgf19twoqstfhphbxxtfdgj2
lvgqnkqhltwo9r
twosevenfive2rgfsgmzqjbnine
7ddpjxmfschtqdgnjqc18one
oneninefiveeight6
qs4five5
rpxbcff9vpmlvvrlonefourdnczqzjbvnine3
7592r3qjtwothree
sixtwoninegzrfp9hq6six9oneightp
f8seven1dcrjqchxchfour8
bnhmfmsbzbhfive1rfqqkzkptjnnxpone2jqhrfnvkfk
qlhpvzdkcfsq84
dfour5one
hcvxnfm32kpzxkhvmvkjsevenllpjpgdhl
dlhhnk2ctvxqmbmsc9five
one7sevenninesix
two4rkb5sixtx17
28three7
msixeightnqmtfmtftsix9
zff8nineflfpz1six1
vk9
86ninelzskddf9784
mrntnhjrvkjgmntone715ktgb7
sevensixthreegkldpptbmteightsix2nfnljvkfkgc
nineonenine3ssnjhkhdplvq3six
jrdgsz8
5nine6bglkndgfnineq
nineone8seven5zhdd2eight
17qjzvtwo
4rncnvtp5ssznhncdnnzndr9
6srf
5two6xxkzdrbfsix
five314sevenseven
eightsix7lhfqone
gdz3one
6sdplqg
rsnnm3twoqlxjjvzeightvxszxhmnck
7onelvrtdktmcmrm55
1eightzvcgfqgfh
ssgrzpb6jntxrdhvmrkeight
5oneninetwo
gjktwopxxql4gxfourseven9
four55sevenvglcrqsdt
nsqrjfhs2eight
seventhree5g9
threehvmrmvglvvsix7pqxzfjvv
cfgfmgg5vmqnnqrxdxone35
two2fxsmdjmb
rrlxrkjkxmfrbvxsevenrlllvclhvvfourlp4seven
djdoneightthree82eight6five6
two4threecdbgnmzvdd53oneeightkmx
one5qdtnrpcmrnnsbeighttwotwoninegtpv
86htqkfrkmone217
onelqd5jvlbm4
eightone3rpnkglzfcveight12one
fourqfxndmpgbqrcpclbnn86hxsxqrrkjcgtgxbdxjdjlzcrhkp
1one18tjcsjzpmzmpmstzmcn9
threesevenpgt3
four9sixthree1
twoninethreesxxqgqlqmqxkmkl2qdrqpftwo
seven678
two29six958kqxq3
dbpjzgdrhnsixfjvs9eightsjdgtckdtsjmhplkjeightwodd
7fourninenine
fourthreedlhf9rf
mjkvfivekltmfourone2
7twozrrtg498one
4eightcxnx
one62eightdktgeight2five
bnpxcdxpzp4tklvsm
d8sixninegrbx
threefourfivetwo23
nine3onemqtbrhlp8jqslv66ssjrkh
1four9fournine2
xfsl83two4
65twonine
1twofivevctxpfvr18
819gc4six79
27vtvmsix1phzrone2nine
boneight1mskkndkgfive
1xmpkmg3
one8sixthree9zjtmqjjjtc1nine
six36gqjctdsevenlnmmbr7dplrsdptwo
8jjvgmffgxr5
fivehgkc5
2qmbvphfivexggrxljhj9qmjrzmrx8eight
3rndntwoseven
boneight6tworgxffournine
htbvm1grtsgdgbl
1hkndcqv
32sl9six9
894
vrhxnstqqmflm6glgfnpfour
5ninekrfivesix8
5six5
eightpdzjhvbgm5oneonetwothmnkflz
vnfttnhcs892rmpvjvqvfftmzlkcrzthkbhfour2
9twoeight
73553bkzzhq
fvxmvs47
3bhmjpqvzs
pcftqc671
sixtwo8
threesdhhzmxtzpsfgs286nmqpvrtkbktcblqqb
7pmhlfcmdxthree11gxpncfqntjtwoone
8fivel3sevenxqtpxjntbjbtxhxtqdfnfour
6seven72zzseven
seven9kcthreetwo2five7br
kvkhl1fzvpseven
5oneoneeighteight6
lvfx6onefour
ftjzq4
slbgthc5xsxvplbskk
64nkjjvrmsqzdone
twoonelgcnrmrm52
gnfmrdkzoneseven7twotxgsjsix1one
1xlrpbbdbdgftkd
8jmphpvclbf87two
724threesixthreetwozsix
bcxdnrd91cprdfcqfive
zxjcgbhfr3
scfklbckxjggbgz9txqnzf6
1twohctsdfkgsdteight2
7threethree
jdj9
99
sixfzv24nine2bz
onefour1frf79tcfjchqeight
onethreenkgjgtpxfkmncvmsqs2bsqppgg
fourfourtksqrmnmqfour351lrsnv
6559hthdpeightkfive3
sixone7
eightoneeightjcsggnqthree5qqmp
sixeightonegltz7
three4plz3kls
8sixmssvtthreeseventhreenskzq6
tbzxjqxkspseven9dtbqvchseven8mbpznrrh1tdmqs
1b7eight
thk23eightfivekvzphvmsfour
7cnlrjzvk66threefxzrtgrbvmbp
bhvponeonefouronedcstbnl8
eightninesix4fourbrpfp
four29eight
kqh5zkgqphzjvz
4zvh
fivesixnzbsvfjhj2
rkrnbpsqjdthreez78mg
eight4gzxfdjpnjnskzghlzfdmjhz
seven8fourxzdmlcq1mnine7qbblrqtrsq
gd369
hgtqgfive8
pcbgddgvvzpmone4onethreeoneonefive
95nnjhmhrqgkkstwo66
723sixone9threefiveone
threekfvqsscfjr8
3vzltxllq65jvthree
4977d
35pgnlsjjpbgseven33
g25twonineeight
three8eight3six
8five8vjnzglnrbsbxmjqzfvrsoneightlpx
3bksdkncgtmdrlvkgboneffkszhfive8
five1hfh6
fourfoursix2nine9t
6onetkhqqonetwo
26hdfljh8
6onethreefvgnoneoneninefour1
jghct6rzcbjsfivefourtsrbsjkgmtfive
nine93eightxqknsqhrmjthree2
fivenine44sixsixlxdkpm
threeeight16jjhdkbhlninekvdvlpbq
onethpbzthhlqxnlrmtwo8
seveneight1eight
5two8
4four8vdpmqvf3onenmfg9
eight9jtxgdzdrtwovznpmnqvnnlffsix
456four
9hxkq
713kthreexfbdrtbx
1five59
1mxpmfive63
sixfour1ffivezjj
97eightthreecjnxblrnine
kgqcxfzc4sixnineoneone3eight
9fbklklqqsfourbfhmfmgnbtls3three
xxjnbdxzeightjqvvd82kfchrthmgkfive
zfdeightwokbq3seven
6four696ninezfhrbg
zrznfshdvlxcstqtghjnjvxq1dlklzghbh
steightwoxfbfzlnzvlj7919four
nine48oneeightseven89five
ninesxtdnjkmg7rn32qdljqprj3
five6xkcg1qmpkddfbsnlhzltlqqjr8
99sgptmns4five
2ninejdstk
gvclfcg9
n9lmv
jrftwo3
sxrxlnvdhr47dppck
jvtwonesixlzdshrfjtzgqddsix7
7peightoneeightwobsc
three6txdjgqqmsdeightcxmljmmzxksseven
fqpx9three96xrvjvjjnrhknsvfour
9fournjmslzbmx34cnt
f3twovfc
shbtwone3kzdsdrxgr
4five1tfhvd27
rdccbvkr9gjsmxhfdzgvjcbkdmsdeightlvjkdgtdqmbvscvhpmdz
pzsqhqthreetwo1qxfbhreight
threeeightxonenineninepmjfgzsdhd1
nine7sevenone
hbsqqtlqcxkjjccmmpv5
6nineltr5plmbgzfqbxninerxzbzrblmfsmjggvr
5eightwolxp
5cscpfqm
four3nthbmbxfbvtrqqkfmltmtjh
pvxtx89jp9onesevennbxmjlzmfk
threetwoeightzcgkbv1
7rxxfpgvl3eight8dmhkf
nine6two
tpnr21five1eight
ktsx6threeseven
8four31drvszglmv
fgkmbcgsrjgs6eightfivefqqdksrndrtdbsxgmfgtggmrc
sixnineninefzvthdq5tkzrhrgdqblzndmv
7lfdrntqsm
seven8dgxgqvgvmtdkhqsfkhtqcjpvqlfnine
znltfour6twopsdmcthree
oneeightvvvxxptmk4eight2n
5six1jmmqtknpveight
eightsixxxpbvqhpknz27mqxvf
rbdchcmqhseightnine9
944five8gxsjf98
smdhcnbmone9
one64six
seven4fivesevenfournine
fivetjt27qqxncqcone
five49655onet5
mlqgmhfgsix3two2
jxdfkbvdgnjqznineh1rdqsv7
fqdvcktwoeightsixsevenkkczb3
3krndk27
qjpmscfqf2gzgvzgksc
5zs11fourqqzfour
5h
fourtjzxtgrtd2twokrkpfm
fourgxprrlvmhgpggsmzbone7
ldbprdzrqkgqkx23pjvgrc6
ninevfh2seven4four
23gfmpvvsrjr4vlntkxkeight
sevensixonefivefour9two
vmchfrmfntwo3sv1twoonenngblvmjsf9
sqhzppseven79kzdppfnk8foureighttwo
1ljltwo8xhszxqmtponethreesevendkx
4fourfive
5fpzhcd7pmbcxcrkp
cdhqgkhfive61seven
eightqtzrscqhr3
brrftwothreethree1
r145pbthqtvxd63
ninebdnxdvchzf57oneightpp
7one1three68
five8nknine341msfflpnz
sixxmsevennm6
2n5cdvvtghfg2lmfrbjbvtwothreefour
39one16
l3two
xsmtghfiveonesix6lkzfgq4threethree
6onettqbfp
nhxlqqp3vkcgsjgkgmjrpll
xgbzrtkbthbfour7mptxqnbrkvhninetwo
7foureight55cvpgsvsr
3twonine
gsqhbhfrzfour9twovjjbskdonegseven4
2fourthreedcxrcmjmkprdsbone5one
519955six
two8fjgvtwo7vnttwo5qzlgxcfkz
rvk4eightthree
9six6
clpxqfn3kcn3
1pzbkzdn9sixxlq89tcnm
nineninetwogtsjfour2twollrrllvk
three5ql7hdjkxdbrcqsfouronekqhc
ltgeightwothree5ccxbhssxrsbj
42three3
dkmm8eightnine4five
vqmvzpb9tmmsfgscfive6mtjjbleightfour
ms39zmtbptwofive
4xxxkdxlhjjx
6p
threejfqfjhxp9txlrlrbczdxzpmrkz9sevensjlgstbpffhtb
three2zqtrtpzvsffdkjsevencjmrdqthree6three
two681
fourlhcsspzf5fivezgzkclbrdrqdjcksfive8
ss3
znrzctdlgjpzszsixzssrfpcvhlbpg8spfxsgbctpttbxxxsc
mjfsxjtthljlzzjlxspdsgsstltmlxf6
48zvpktkpk3
7khsdbtp43rgggnxf
2ninedkttzmpqddsix
xjcqvkcnbpbqsmclfnsdbnnine3eightfive
7eight6
eight8zqcdbzqfour3
one8eight27
ninemmheight3jdkktmthree
oneeight3lbsrnqrn
8ninefive71
hd58sixtmdj
fivesevenfourtwo4
fivecdsvgkfgbntwo42
593jxn
sevenfxkvdxfjgskjhhphnjrgfhs8sixcnine2
fourtwo2sixthreefive7rq4
threesixdhlkvz3fvdslkbs3
nine3fkrzbjbmjbsr
nine7seven36
sevenbnncfhrnzq52eight6lvfdmqhxkdqjl6
zshsmpsmbzpnfthree1five72fivetwo
threenine59seventkthreeflgkvtp
nine9nine6
8five4foursix4
jnz1m3fmzzgt9kbtpr
81btdvrcspvfour
three1smvpjdnhveight1sevenfour5rqkqvq
49fiveeightonefive
2xmdtnineczgbqmhfivef1six
638nine
eightsix6twooneseven
jgtwone6gzchdrkts
93tsrvf48
1qkrdfhpseightninelddrnffp2gc
nqvkkprztqsqfk74threeseven5six
14two
dmhnlgxqpjxxvpshqt2sevenplzbgvsz6
bffour72
zgkgvone8bxsnnlninegmjtnsqtdp
19oneninezm88blmdhbcrns
2fivessqk1fonevghgnz
ddszqsldhxpsbprdbqkmhs6ztkbzbkm8cddpqm2
7eight5xhdtpfive6mbf
qxcnnmdt5vctqqhmvrmqblsixfour72
6onetworpstxdq3tvrcmssmkfhbr
7twoeight1
64two3sixsixckbkshnkjmtwo
7csixonegdbrvreight
bfdptjcjdq4five7sevenfdhhqhseveneight
8one8dzbninethrmdldmpn
three7xghxhr1threerqfxseven
two9xrhvzkgcck3vfour
5lvtrgmpttk3fourfour
five581cjndb
3clp81dhzcbzxc
threesixsixbtst27
fivesixnines146
gppzks5cqggtq
4rgdzsnqlcxone8vninegkgqh5fcbsfl
jfpdpfcccrjpcfive2threeseven
ninefoursix6sixtwooneone7
twothree3njhxgc5rhntgnckg
3twopvtmvrcrnr238
k4mgzqbnkrthreeseven9
bzkoneight2jxllsevennmhxpgbdkfive
886svgcltdl222
sevenqjgkjdfrrskhklvrg9fourtwofive
cdsfdkdsvtcjhzdpqdgtwo454five
lmksxbv8foureightbxskfblhllsdrxfs4v6
49sixfour9nine15two
75mtcmthreelvmsksnine9
foureight849zgl69
92eight6ninezjfive
8onejsfvnjktqksixlfnxqrjmdvt8vhttwo
48three
onesixcksvcdeight3tr
hvxqb1fourseven4
four755grdsixthree
seventwokzpljzth6mzkvsdbgldfbdc
ninexjgzkcxl3sevenfive
949klmgj3
53threethree
lzczjzsftmcmclqqbrgjftfhxz3
9rpzfnrseven3tsqxxgtrsq
fcndgzmtsj86psclvsdvggbhfhd8five
qxkd2lxzhhcjlxonel48ndktqt4
2five5twovplrbfxfjzvmdvjs96
4drtzsix5phvgbqjsknnine9fk
btmdjvnkrqxvjzchkkdpqcnldljs7eight6znfphg
xtrjonefour8threexksbbvkxmkthree
sixhtxkz7pnfzsnpnxnine79
eight5826
hrxqbqq5sixkrdjszbblnpdhfour
xlcmcgprrp2
seven4vfm3
bztngjjhfivethreenineeight5
oneonedvgbrpgqrnh77
jfnphpvcggfour5zoneeighttjxhcqhsml
796fcone4
7fivetrmdsb
2fiveseven
fmtdrcczngfivefour6fiveqbkn
eight25lxjdzkrrqj
fiveninefour373
fournr2kzbls1tghseven
3nlmhvbninenine1fourpppthree1
j4fourtwo8
one2f1oneightc
8g2six8
nineqzlsxdktwokdhsevenpkqqhjb4jz
cqsnztq6hjvkcnpxhmplfxfv
6svktmthzeight1nfxldggeightqfmhhzzsdthreesix
five7six9
thlhmgvvrmqv9
fourpvksthrjlxxdddseighteight55
qsccghmztwosevengcrfqnzzmrhbgrxlrsxqxtmthbv7three1
onetwoj19eight1
8xgtltlrbnscvtone5nine4
ftx5
knsvnknplxrgtnrlqcl5
nflmxkm2dznjhp7three8
five2tttpdfnmdx3fdrtmxzqx38three
two3onegkj
47fivekd
six9one5nine
l3onegpj8threekkgdkfzscrrx
four4ninekfndgvhrrbfzpzrnvpzlxss2l
ltgsxcrgzqrfpkkb3oneeightfive91knzsppbbx
sevenvplfzkncmzninenine1
sixhjjsxgrkgstwoeight1knhgsmscrnpcsone4
1jbm89two63two
onedmpxhvvcjssixeightkpvdmzjltwo3eight
fivevjjtfeight6nine6
thffrjkhsixsix1one7one
dbpvngh3eighteight86foursix
pppcpjmbmhsvhjmkfour2onedlnrg
2onev8
55gpvxsvsnpfrpmn
lfsmfbone58
tv48threetf3
8threeonefour71
ghfslsb73dtlgjnj
565twofive7
sixljhfccjg5eighthpskg
5four2four988rd
xtgtb99
kkzfxjszrfjvqllzlhzsninehzbfbvnhtzjf2
165
3jtklkpsfxpxlrgltwojxcpqjdsfbs
three7vknxbtthree
7fourfourfive
sevengseven71nklkzdeightsevenfive
eight37one
onenine2
8nine9vkxxhnm6threethree
7one96fivesix
58bjnfhjxsm146six2mzv
99jdqbtb5seven
8mqztm7bdjh4
6ninevninenvxflxvgg
k2twonek
gzmtwokrk9ptrsixfjbktjc9
8fivesix
onesqqkvdtrs5vrsbgnvkjseven
bnbxpqhqxthree79
nine5l96ninesixqhtxpkzb6
sevenfive8tklnkqtwo6three
qb2onenldzvsixdjmjzdfourqgjsssr
thqmrjnjg3dbnksktvk
318
three71blglscfrzql3six
9nine6nine4
sfive1seven1m
five3dj16
5ninetwotwo8srrvkx3
spcsttljpl7
6rslsdkfs4834
dshfour8knjplkghpfpzcszqsix1
ninephtlxnnl8hzn
bmr5
tm4czfdfbhps
sevenseven4three9sixtsmg
sevenprdqm1six47cnbskdfivethree
7one6gzfdvfvfourtwoninelbndhdkqeight
jzxczvgjgxdfrzn2nineseven
zpcspqvhbfxlcgt1onesevenqktvgjhm
bmzdhnljms4nkjzhkrtfzngbn
nine6mjfbftpcfh4five9eight
qrvsldss4nine4
lffkvnhbpbtnxzncjs4
76ckghqnbsnlnjftwo72
7jrqrkdksixsixtwonett
3xc
mzveightwofive2onesix7five2
74five6mrqx464
qtwoqlrkvlmpqfjvm8onetwo
41bchstnlzkntwonineseven1pskh
twoonejtjph15three
fzmkkgdq6
21one
eight59fivenine
jbfmfbseightmzlknl84
four4rlzplmmmcplfour4mllmtxx1
q349j
6dbpqgttninepx623bpbhsnxcmg
52hvsvpk
8fivesevengzz
3fourfnfldsxngrknxnvts
18s
three2561b1onethree
zlppzxzzsixtwo4hksclkzhtkdsldfgjhtfhxcdsmdbr
26eight65five9threeeight
seven178
fvljxzhdln11eight
49four6fourqmspqkbphjzvgzgbvrbmjfsm
onelnmplhjtjtfivesqzn7qmcpchjtxfcc
hd8
9pnneight
five1qgdzkj9eight
rfhbnhbkzteightfivesixtwo3
bczljcceight2
145sevenhmfive
8lbgkvdxlccgzzvvttcfgmthreehhmqfivexcdhdd
6bqxzzzgpdk48
twobcxchbfour5m2two1seven
twotwotwo6513fg
48one6tmjxclchtnpltmjkpmmm
vhkcklgdhk9xmvtbktdvjmhjtwoned
eightfourfour9one7
fourqhgjdpr4p3sevennine
vlctnqfour4724
9spbtvbsh7scvvx7hvbxlh
eightone1ccdhppbsninejfktrxst
448fourhjk3r3mhstwones
hbgfourtkvg87sixlcvtjz
fivesphnkzrn5threeninefive27five
92two95seven5
twosix84zjqtp
9bxppvsjgdmbqvkstrqdsixtwofourfour
fourfive3sixnine9gmrdhfhkjtrnbl8
gxcdvsjxgll46mrrdfpdkdl
4foursevenzklvhrdvpj
seven1rz24rfmnjxjkthreenfvjdsjtgqkd
2xbfivelpfhvhsttgjqfsqkffctppeight
6vxfjbssixfkjqsdh83xgzdmtntgm
5four98two
xdk6
ptxtvnlpxbpkxptbhxheightfour7
6four5sqgfour
eight8rbngtqpvninelzhffxpmtwoonefourfour
three1eight2
1two8nine
9eight82
5fourshdfivefpfd3
two667dsmv3lvhszg4
kcxpmrv99threeone
seven7five3ninerhjbmtfbzsevenfive
4four1threegvxkdsfour
qdqrgf6fourfour46
7nlztdphcjdnqm4zddjhfreight
sixthree62
vs86four
sssbdgzsjnlftms8
3ninethree45twofsfsvtnl3three
four7zcfjtngsninethreeninethree
gcjxkcnsevenfivefourhrxzvxftnine4sixzm
thsdcone23bvrfour71five
fiveeight5fourrlhjtmrtwo36
61lxhrpfvbqkd
55three5seven1
sixsix9
gklpxzmcmnnbhsgeight63
fourninelxm5cpkqvbpbsvjndn4
hcjghqfssevenxzdljnvqsix1sjvrhxxhxthreesix
rtfttcqmxszrsixtwo37scglpjfjt5
eighthvbsldnhfzxr7eightsrmqhsgonebx
zcvqxjn1vdddlsevenninefourninethree
qr7three3two4tdrqd
qbjttlfsrrrhrmkhpvgc7zkone
6fiveseven7ndfhzxzrrf
fkxhpzb568eightspdxfngrp82
seven4sixtwo
nineddhgjn4
37chjlxcbbjrjghzjdsix4threeone
jhngdhntvfmcbjjvlgx979
9thpnx12835one
onesixfzjgpfourseven1nine2jrxjp
5rzvhjqnlvnt1lone
9dmftktf9
eight5eight
grkfgrhnkjm9
23ksknkqrdthreecvg2rbxkkhlqttfivefive
3foureightshnff5six72
19ninethreebcgxkmjqhgpmfx3
28rdsm5fiveeightthree
dmgpjdzfl1two819
pbrvnfpftb3vptbqncmqszbbctnj363
fzdc29331
bnkjk9m289rxgqn
fivenine4seven6plsczmxr4
fourmxrltjgdleight1three
nvjxvpgtplgszgbeightcccxxjhbsfqdrj7
5stjbvxglkdrbp6kllbbnptdfzl
dmtpxndvvrbksfournine5one6
vnlhxcssevenjlfgfournine69hmnddrthree
blchmddjmktwo4fxsqlpfn69xhbpblrlmone
17qdxzqlpllfourszcpxmtcxhrqzqqhm45
fchdhsjzt6two9
srs3vcq7three
twotwofour4seven
three4six8lsdg1
1fdvdhxrxmmvzonehmkzspfdlrdvlnfzfive9
7threeone
96ninegjcxpstqhxbdzfsrczm2
xthree19
bgptbh73sixfour
eight82
sevenb3sixthree9zjflmmdjfour
kcninefour2ninetzqgrztwojq
eightfourhfqttttxteight5
zghnvzdpfivevvgfjzfzvcbvccmtwo5tkzj7
33qxzctgqsvxnine9
6onejdndbcjsseven
294xqcrnine3844
two6ssqvnvm889
rzfbhbr15jpnfqmzthreepnine41
eightjlltprrrtv63twoptg4
threetwolzdbgcpkm3seven1five
mcbjrtbtbp7twoseven7vdttjvmxbxkfmgmvhrs
three5hfivesevenqrc6sbst
eightdvvtsvmfqeightfz5six
kklpnghzsfv88two86eightplfgjz
bcgntz6gqxbxxtnnonegqlz4fivexmjxqf
mvbgjpsmhntf7mmshslgvv17thlknqfivel
fourmjflcbtwo7fivetdntjstntv8
89fctlsxz8eight
1twosix57sevenfive
3ninefivencdgmlqxlnine96
twofoursevensixeightpmpfpfljqtnm9six
5hqnxvlmnh1bxxhnpkjhgzxlhgffv
nine9fourtwo8tzseven
grcfkddgseven55g
seven8nineeight4one7lnntwonebc
one1ktgggr
76eighttwoqzfnllqnkq
cgjkzlksl1823749
9twonvhlhrlxnine7eight
qtzmndkrqqcpmjcxz98kklrtjgbbnzglstnnlnqksrgkzz
cqxknrmb5cfhfgpk
9lhrnlqrnninethree
ftlpbv55nine
2nine5tfqhqtwo
mqcfgssixtwofour6cvncgmxnlq
946eight84kvflb
h1nineglvgmh9txxg8
lxqqqdfive5mcgrjkxmlzdqbkldjqkhlxnbg
jjkvdzzkpxqp1three59dqthvgkfmfjvlvnrtlzlmqvcspjfc
sixfoursix87oneoneseven
1ptrtgd6
5threeeightqqjrmchfoureight
one42mdmfkrzln5ck8qxbfqf
seven819ninejzrddsevensix
vfpeight4pngqpbvsbz
threesixseven1
7twoonetwo9eight88slpctv
fqfsixtwofpm14seven
6xcdvsftlsrsrgseven5seven
mhmgzdgrc6lnnzv6pnr4stxnmrreightblzp
9glsqqcrttdgsdqkfour
82two
ftqzqnlddctlxmb3rdlg
sevencxbpxvznqmjqgglfccqkvjqmjbpthreenineeightnhszbvnff7
twosixplgdjplfthree9kvhvnzzqzk
1pfvkfblrmz
2n
threeeighteighttwo7qmlnjx
6sevendhc6
nineklzvllx3fivesix
2nrrx5fourfcljkdhpk
eightone9eight
gqhsmccbpjc46twooneighttbv
sixtwo2bxpdqfour5fivexkjxzb8
six4nineggtqsxjd16mhgpjflprmsix
tndgkrpmfour1foursevensflkzt
bpxtdkrcr5nsevendltgfgkzxdtbxsghkkninesixone
one22threefivetwo
lzq7fzbqsd49
cgthgcsix1
389seven
cmgctmtwo8eight
2qvrbrnvp559
eightjpkjnpgplseven7oneightdv
cdzoneightsevensixeightmfjsevenk2eightfour
2sgtftmrrlfhgqxdmhtfqncslmnlvdhnjpdtpg45
fiveone1
ps2threeeight9hvmjrc3
pdsr2xphzmmztnqxpzq
nineeight7ninevhlksv
jjbsm55fscpfpklq1sixseven
fivefive1nkpzcxrxbfour
fivesixdkxvbtzvtlsevensix3fivekh
twovrzxlmnxnhonedpdmvn9
hmqccxh6
five6nine8vcbzzqrvn29
5nfmfthree4nthreenine5four
kpsbqshc21dzrpktfseven
cdfsszrtpnsfhgsix9bzqjdltbtzjsz
twofive4eighttxjxoneeightninet
six4tppbxfiveblktrzgdvks1
eightzmvmdbbfnft2
7npgl9vvtdl2
3twoone
seventhree6skl
onepdfjql18nine631
jpjtvxck3sncsvnsix112
njrkcgznhvfour1
gfmtkdr2pvvlnh9one2qgvmxfm
4jfxzmcdonep
one9threenine
544
one5sevenzhmljdbfb8pncqtdkch
nine72eightxvjtrbzpzvcqvlone
sixfive8hpnvvjzld4
24f8
6bqtwothreectlcqkkzfiveppvbgbvspncldjhb
ljmrlhxvsone1qdsfhqqqlljd6seven2one
mfksxgxhrqxbtg73r1eight
ztwogdpcgllnpp516ztdkj
two8fivefourbs5jlzfnleightwoqvk
seven5qnrvpqtrvlt4
ninedbnd8mrchlnineqhhqjnznts
onethreezmhshprm2jb98three
3sixfivesix
two6dfgpzqrhzp8z368
twoonesixfscgflrlkmbtmv5893
fiveninegkfbgczqjhnine7g
rbvknineflmkl4btbqbkpsd1eight4
39324
sjrgxtxppvxsseveneight4fourkdcdsmfhnnqjpbtxg
6threevcgjmclnhcfxvvbncnhjkmmsqqhqffivetwo4
foursix1four
zbztwoxptgdffourgzgzzhhmhg78ljlchqh
dcpbt25vcdf3three611
v58dtgrpl
rntgtrrc67mdlfxzgkvjjpqtfournineninefour
lpcts8vjznknlj
bpmv3
5z
zmsevenbnckdjhpbsvglrfsskcsevenfour2nine
6threesevenprcjlk5644
nineczcd6922
2vksfctzbjz
kz93
twopfhlqplngsj6z13four
8nm3sevenxqzj
cjgoneightthree5kmpndvrmkljeight6sflmeight
three3b3seven55xgrrkssnzsghd
5lvxlnpc24fbrbtg5
7qgmhfnine
sixpbvfjzkdsqdpsthreekj91nine
sbmxhcxonefggt4f95oneseven
onemgkvjpgfdjmhcsxsmlfjgzqqrftgxgx2jdmbvbfdf4
bvzcskfrlg8jseight
hzltjqmxjt98eightfseven
2seven7vbfm44four
3bvfnine
4mqprdhhrdqz
twothree2
seven3threeeightsllttbrdcthree
gmpxgkvv4four
vntmvnd4two
fqhvkqrxdtwo1threeqrmg5
5f
fivenscdbpvlz1
vzdcg921fivelplfsbdccrsdsq
sixnine2rnrdqkfpmdfives2
5seveneight4sr9gffive
hknjrm5eight34nine
7965
2t34two73xlfzpd
626
1kdfnbjgtshsonenine1eight1kzp
two8mxrlgchctdtflkknonezstpfgxsdx28
sixfdjgvfm1sevengg656vqcql
threesevenbjcfxlbml8lhxjmzgspfour
4phlqzbhvk5four
1lkccpmxmlxlqrxzz8lkmbpmdncgpzsxgqdcb
klqcnhjvone75
5two7
2ninecgrltzpgzpfourfour47seven
threeoneeight758threegtwo
54j
seven9lqbfgrsthreefivefour
six7kjm2fzcrdeightfour84
four198766
six3mfgmcrmlnine6lhddlhgl9
rkpksqvfiverxvbseven7
734six
five5one9qd4bkgfdjgtxzlxfjr
17eightsmjzbgdbd2nine
eightqnthjhlnfive7nddfjcxq
fourxkn6two6three5
twoseven2
eightrbhdkpxpbsvmfivethreetwox4f
4bpgpdngvnineh
lvpxjhgkz77pthnktwomvonekjvkkbr
gqpcqlzfive2threejfknlchf
zgjslb74two
sixhxmt4
4bcqxrcf
onesix59jktnrtlone9one
9seven1msixkhxbkgbnqkpfjrr
four1lhslrtdrnxxsmptbjjgj5
eightcvbzqczt9ninegxlpsevenfour
threenine2foursixtwo2bldhzmsdvj
two8dmzmbnkjqfdqtmzxndrsnkrvdnl
9lqtcrvkvhdkmbfivenine6tkqh
foureightjdpqhldvzeight17vbkmjvfs
vkzvsphbltfxjfxxsl9
shdgdlft27
87twopsix7eightwoj
57threeone
pshspdc9fourone5eight79sxrjkcmcb
sevenhmzvvbpdktxxjrlvntx1
ninethreendlnnine9
nine2tkmkrpccone
7sixsixvdv
vpktkldsjcpsxjztmthree3onefdbcxkrvhcm
9lqmbltfoursnsbrqn22sdnrpxfsjfour
five8mgctnvstcone24fcfbcqtknjmddknkzeightwod
8xfbqxtwonmjthm24twofivelckrr
xhnldpddxcjnm5twoddtqkqd
sevensixdrcvgfxqpmvrtmgqdjqvclgnqjsfhnbbs3jvxnmjqcbtshp
mfour1zcpnpfcdeight
6twofour
r1seven7tdjgxshntl
eight5sixh
xntwonetwofqpxsgshnbhjcsrld75fourfjbllqscg
three3vbvqklcp7pfkngtnineslmrlq
7six3onenmttntsfq3
fivefive1seveneight
p2rpjkcpcsix75fivednd
ninekkzpsone167hnqlhvmtbffive
2qmtbkslqgjkrqhnine3eightonefour
2nlfivefive6
xjvbkbtdqhgvsseven719fiveseven
prbtnineqdvknxkblzgj7
lkgvhtfreightninegtfrmqhd4njvsgdrhdqfxrjrcvvcz4zs
25four24dhqqkpbprx
29two
7xsbfsfivexrrxhthree
fivesrnvbdtnqnfourninelctfnxkbtv6eight
9kkpzgthree1stb
sevenhzxdbbb81one
2jszvqhv
zkvq25sj2
hxxfg4kqjbjpjczzddrx9spkgdcvcsvmblvfxxcn
486l2clmtcgfive47
dfhkfnfour76
52hnpcc
kmktwonejdpgch9tthree
x6bkqrlhhcm96
tf7kndclhgjsoneoneightxcx
mdzmsfkl5pnine8
ghzv2three
vctrqxfsxhtblghfchthzhninefk3eightqjjjjg
three2jmvkmfour
rlkbtjpnlctsxpfp521rxbjdfsixhvp1
7vxlkff32mdfsqrmbnmtwotwo2nine
gl6seven35two1
two2rjtvndrzvthreethree
tjpxszlthree54hsftvc
jsthree48
seven6fourtwotwo
threetpnh5ninernztgb
18pvqllhjf5eight9vldjjqcjfmlhnddone
two9nine6hgbprcpxvmntdjkfivetwonept
7four8eightktlhdpmptone
rrzbgtfrrqkspsix3rkpzddzrbcrzvxzstjbqhmqq
84qxbnxdpqppjfiveeightfive
n6two1brpjhf
plmkvpjbqr1
sixsixqbksfrndvg42hclgpgfggpxmts9
1htlmmvbnsix
dleightwolvbvmsggs9njseven5fivethreenine
eight6kxqqdnqp
5eightgdvgthfiveshthreesixfive
6seventntzffjkkvvhtgtwoonethreefivekzvptvxfjg
three6blrfsgdqsxgkbqj3
eight1eighteight8
8four419eighteight1bpv

Added scraps/advent2023/01/sh/01.sh.













































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
#!/bin/sh
# num_line LINE
# Print the two-digit calibration value of LINE: its first digit followed
# by its last digit, after spelling out digit words ("zero".."nine").
# Each word is rewritten as e.g. "one" -> "one1one" (the sed replacement
# "\11\1" is backref 1, literal digit, backref 1), which preserves the
# surrounding letters so overlapping words like "twone" contribute both
# digits.  Prints 0 when LINE contains no digits at all.
num_line()
{
    digits=$(echo "$1" |
		 sed -E \
		     -e 's/(zero)/\10\1/g' \
		     -e 's/(one)/\11\1/g' \
		     -e 's/(two)/\12\1/g' \
		     -e 's/(three)/\13\1/g' \
		     -e 's/(four)/\14\1/g' \
		     -e 's/(five)/\15\1/g' \
		     -e 's/(six)/\16\1/g' \
		     -e 's/(seven)/\17\1/g' \
		     -e 's/(eight)/\18\1/g' \
		     -e 's/(nine)/\19\1/g' |
		 grep -E -o '[[:digit:]]' |
		 tr -d '\n'
	  )

    # First and last character via POSIX parameter expansion — avoids the
    # two extra echo|grep pipelines the original spawned per input line.
    digit1=${digits%"${digits#?}"}
    digit2=${digits#"${digits%?}"}
    number=$digit1$digit2
    echo "${number:-0}"
}

# Sum the calibration values of every line in the input file given as $1.
total=0

# IFS= and -r keep each line intact (no whitespace trimming, no
# backslash interpretation); quoting "$line" passes it through whole.
while IFS= read -r line; do
    line_val=$(num_line "$line")
    # Arithmetic expansion directly — the original wrapped it in a
    # useless $(echo ...).
    total=$((total + line_val))

    if [ -n "$DEBUG" ]; then
	echo "line: $line"
	echo "line_val: $line_val"
    fi
done < "$1"

echo "$total"

Added scraps/advent2023/01/test.sh.



























































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
#! /usr/bin/env atf-sh
set -e

## test cases
# Register every test case with the atf runner.  The list is kept in
# declaration order, matching the body functions defined below.
atf_init_test_cases() {
    for tc in \
	no_numbers one_number two_numbers more_numbers multiline \
	word_zero words words_mixed words_combined part1 part2
    do
	atf_add_test_case "$tc"
    done
}

# check_input INPUT EXPECTED
# Write INPUT to ./input and assert that sh/01.sh prints EXPECTED
# (followed by a newline) and exits 0.
check_input()
{
    # NOTE(review): '<<-' strips leading *tabs* only; these lines are
    # indented with spaces, so the spaces end up inside the input file.
    # The tests still pass because 01.sh word-splits each line before
    # processing — confirm before reusing this helper elsewhere.
    cat > input <<-EOF
    $1
EOF
    atf_check -s exit:0 -o inline:"$2\n" $(atf_get_srcdir)/sh/01.sh input
}

## no numbers

atf_test_case no_numbers

no_numbers_body() {
    # A line without any digit yields the fallback value 0.
    check_input "foo" "0"
}

## one number

atf_test_case one_number

one_number_body() {
    # A single digit serves as both first and last digit: "f1o" -> 11.
    # Uses the shared check_input helper instead of duplicating its
    # heredoc + atf_check sequence inline (the input written is
    # byte-identical either way).
    check_input "f1o" "11"
}

## two numbers

atf_test_case two_numbers

two_numbers_body() {
    # First digit 1, last digit 2 -> 12.
    check_input "f1o2" "12"
}

## more numbers

atf_test_case more_numbers

more_numbers_body() {
    # Only the first and last digits count; 2 and 3 are ignored.
    check_input "f1o2a3b4c" "14"
}

## multiline

atf_test_case multiline

multiline_body() {
    # Per-line values are summed: "1" -> 11, "f3b4" -> 34; total 45.
    cat > input <<-EOF
    1
    f3b4
EOF
    atf_check -s exit:0 -o inline:"45\n" $(atf_get_srcdir)/sh/01.sh input
}

## word_zero

atf_test_case word_zero

word_zero_body() {
    # "zero" -> 00 -> 0, "1zero" -> 10, "zero1" -> 01 -> 1,
    # "zerozero" -> 00 -> 0; total 11.
    cat > input <<-EOF
    zero
    1zero
    zero1
    zerozero
EOF
    atf_check -s exit:0 -o inline:"11\n" $(atf_get_srcdir)/sh/01.sh input
}

## words

atf_test_case words

words_body() {
    # Each spelled-out word N yields NN; 11+22+...+99 = 495.
    cat > input <<-EOF
    one
    two
    three
    four
    five
    six
    seven
    eight
    nine
EOF
    atf_check -s exit:0 -o inline:"495\n" $(atf_get_srcdir)/sh/01.sh input
}

## words_mixed

atf_test_case words_mixed

words_mixed_body() {
    # First word "one" and last word "one" -> 11.
    check_input "onetwoone" "11"
}

## words_combined

atf_test_case words_combined

words_combined_body() {
    # Overlapping words share a letter; both digits must be found.
    check_input "eightwo0" "80"
    check_input "eightwo" "82" # hrm... not sure I agree
    check_input "twone" "21"
    check_input "sevenineight" "78"
    check_input "twoneight" "28"
}

## part1 answer

atf_test_case part1

part1_body() {
    # Sample input from the puzzle's part-1 statement; expected sum 142.
    cat > input <<-EOF
    1abc2
    pqr3stu8vwx
    a1b2c3d4e5f
    treb7uchet
EOF
    atf_check -s exit:0 -o inline:"142\n" $(atf_get_srcdir)/sh/01.sh input
}

## part2 answer

atf_test_case part2

part2_body() {
    # Sample input from the puzzle's part-2 statement; expected sum 281.
    cat > input <<-EOF
    two1nine
    eightwothree
    abcone2threexyz
    xtwone3four
    4nineeightseven2
    zoneight234
    7pqrstsixteen
EOF

    atf_check -s exit:0 -o inline:"281\n" $(atf_get_srcdir)/sh/01.sh input
}

Added scraps/advent2023/02/Kyuafile.











>
>
>
>
>
1
2
3
4
5
-- Kyua test-suite definition for puzzle day 02; see kyuafile(5).
syntax(2)

test_suite('02')

-- test.sh uses the atf interface (atf_init_test_cases etc.).
atf_test_program{name='test.sh'}

Added scraps/advent2023/02/sh/02.sh.











































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
#!/bin/sh
# Usage: 02.sh <gamefile> <num_red> <num_green> <num_blue>
# Read the first line of <gamefile> (a "Name: N color, N color; ..." game
# record) and print "<name> yes" if every color's maximum drawn count fits
# within the given limits, "<name> no" otherwise.
read -r gameline < "$1"
num_red=$2
num_green=$3
num_blue=$4

possible="no"
# Game name is everything before the first ':'.
name=$(echo "$gameline" | grep -o '^.*:' | sed -e 's/://')

# Highest count drawn per color.  'sort -rn' is required: plain 'sort -r'
# compares lexicographically and would rank "9" above "12".
max_red=$(echo "$gameline" | grep -E -o '[[:digit:]]+ red' | sed -e 's/ red//' | sort -rn | head -n 1)
max_red=${max_red:-0}
max_green=$(echo "$gameline" | grep -E -o '[[:digit:]]+ green' | sed -e 's/ green//' | sort -rn | head -n 1)
max_green=${max_green:-0}
max_blue=$(echo "$gameline" | grep -E -o '[[:digit:]]+ blue' | sed -e 's/ blue//' | sort -rn | head -n 1)
max_blue=${max_blue:-0}

# Chained [ ] tests instead of the deprecated, ambiguous -a operator.
if [ "$max_red" -le "$num_red" ] && [ "$max_green" -le "$num_green" ] && [ "$max_blue" -le "$num_blue" ]; then
    possible="yes"
fi

# Deliberately unquoted: word-splitting collapses the fields so an empty
# $name does not produce a leading space.
echo $name $possible

Added scraps/advent2023/02/test.sh.



































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
#! /usr/bin/env atf-sh
set -e

## test cases
atf_init_test_cases() {
    atf_add_test_case one_play
    atf_add_test_case two_plays
}

# check_input GAMELINE EXPECTED RED GREEN BLUE
# Write GAMELINE to ./input and assert that sh/02.sh, given the three
# color limits, prints EXPECTED and exits 0.
check_input()
{
    # NOTE(review): '<<-' strips leading tabs only; the space-indented
    # line keeps its spaces, which 02.sh's 'read' then trims.
    cat > input <<-EOF
    $1
EOF
    atf_check -s exit:0 -o inline:"$2\n" $(atf_get_srcdir)/sh/02.sh input $3 $4 $5
}

atf_test_case one_play

one_play_body() {
    # A game is "possible" only if every color's count is within limits.
    check_input "1.1: 1 red" "1.1 no" 0 0 0
    check_input "1.2: 1 red" "1.2 yes" 1 0 0
    check_input "1.3: 1 red, 2 green, 1 blue" "1.3 no" 1 1 1
    check_input "1.4: 1 red, 2 green, 1 blue" "1.4 yes" 2 2 2 
    check_input "1.4: 2 green, 1 blue, 3 red" "1.4 yes" 3 2 1
}

atf_test_case two_plays

two_plays_body() {
    # Plays are ';'-separated; the maximum across plays is what counts.
    check_input "1.1: 1 red; 2 red" "1.1 no" 1 0 0
    check_input "1.2: 1 red; 2 red" "1.2 yes" 2 0 0
}

Added scraps/bupwut/Justfile.





>
>
1
2
# Run kyua with the repository root prepended to PATH so the local
# 'bupwut' under test resolves before any installed copy.
@kyua *args:
  PATH=$(pwd):$PATH kyua {{args}}

Added scraps/bupwut/Kyuafile.













>
>
>
>
>
>
1
2
3
4
5
6
-- Kyua test-suite definition for bupwut; see kyuafile(5).
syntax(2)

test_suite('bupwut')

-- Plain (non-atf) test programs: exit status alone decides pass/fail.
plain_test_program{name='test_compare.sh'}
plain_test_program{name='test_install.sh'}

Added scraps/bupwut/LICENSE.

















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
BSD 2-Clause License

Copyright (c) 2022, Pat Maddox

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Added scraps/bupwut/README.md.



























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
# bupwut - Backup up... what?

_Scrapped: `/etc/` and `/usr/local/etc` make up a total of ~3MB.
It makes way more sense to back them up entirely, and then use `mtree(8)` to figure out what to restore._

What files do you need to back up?
bupwut has the answer.

bupwut compares a current directory to a base directory, and generates a list of changed files.

## Usage

Compare dirs:

`bupwut compare -b /path/to/base -c /path/to/current`

Exclude FreeBSD-specific files (e.g. kernel modules):

`bupwut compare -b /path/to/base -c /path/to/current -F`

Install FreeBSD to `/path/to/base` to give a known basis of comparison:

`bupwut install -d /path/to/base`

## Notes

`bupwut install` runs `freebsd-update(8)` on the target dir.
`freebsd-update(8)` has some unavoidable(?) side-effects, like restarting `sshd(8)`.
It also needs to run as root.

Added scraps/bupwut/TODO.md.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
# bupwut TODO

- [x] [`mtree(8)`](https://man.freebsd.org/cgi/man.cgi?mtree(8)) compare explicit paths, list different files
- [x] look for extra files, exclude missing files
- [ ] explicit include paths
- [x] exclude kernel modules
- [ ] install and update freebsd
- [ ] see if system comes with its own mtree files
- [ ] install correct version (major, minor, patch)
- [ ] implicit path `/var/db/bupwut/base` and `/`
- [ ] look in `/usr/local/etc` for port config files that have changed
- [ ] man page
- [ ] home dirs? those should be fully backed up anyway

Added scraps/bupwut/bupwut.























































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
#!/bin/sh
set -e

# First argument selects the subcommand (compare | install); the rest are
# forwarded to that subcommand's option parser.
command=$1
shift || :    # ':' keeps 'set -e' from aborting when no args were given

# Globals populated by the per-subcommand getopts parsers below.
base_dir=""
curr_dir=""
dryrun=""
install_dir=""
freebsd_filter=""

# Print overall usage (both subcommands) to stderr.
usage() {
    >&2 echo "Usage:"
    usage_compare
    usage_install
}

# Print 'compare' usage to stderr.
usage_compare() {
    >&2 echo "bupwut compare [-b /path/to/base -c /path/to/current]"
}

# Print 'install' usage to stderr.
usage_install() {
    >&2 echo "bupwut install -d /path/to/base [-n]"
}

# Parse 'compare' options into the globals base_dir (-b), curr_dir (-c)
# and freebsd_filter (-F, boolean flag).  Exits 1 on unknown options.
get_compare_opts() {
    while getopts 'b:c:F' cmdopt; do
	case $cmdopt in
	    b)
		base_dir=$OPTARG
		;;
	    c)
		curr_dir=$OPTARG
		;;
	    F)
		freebsd_filter="yes"
		;;
	    *)
		usage_compare
		exit 1
		;;
	esac
    done
}

# compare -b BASE -c CURRENT [-F]
# Print the relative paths of files whose sha256 differs between the two
# trees, or that exist only in CURRENT.  With -F, FreeBSD-specific files
# (kernel modules, *.ko) are excluded from the comparison.
compare() {
    get_compare_opts "$@"
    base_mtree=$(mktemp -t bupwut-base)
    curr_mtree=$(mktemp -t bupwut-curr)
    if [ -n "$freebsd_filter" ]; then
	filter_file=$(mktemp -t bupwut-filter)
	# Deliberately stored as two words ("-X <file>") and expanded
	# unquoted below so mtree sees both arguments.
	freebsd_filter="-X $filter_file"
	cat <<EOF > "$filter_file"
*.ko
EOF
    fi
    mtree $freebsd_filter -c -k sha256 -p "$base_dir" > "$base_mtree"
    mtree $freebsd_filter -c -k sha256 -p "$curr_dir" > "$curr_mtree"
    # Changed/extra entries in mtree's spec-vs-spec output are indented
    # with a tab; build the literal tab with printf instead of the
    # non-POSIX $'\t' quoting (this script runs under /bin/sh).
    tab=$(printf '\t')
    mtree -f "$base_mtree" -f "$curr_mtree" -k sha256 | grep ' file sha256digest=.*$' | grep "^$tab" | sed -e 's/^[[:space:]]*//' -e 's/ file sha256digest=.*$//' | sort -u
    rm "$base_mtree" "$curr_mtree"
    if [ -n "$freebsd_filter" ]; then
	rm "$filter_file"
    fi
}

# Parse 'install' options into the globals dryrun (-n, boolean flag) and
# install_dir (-d).  Exits 1 on unknown options.
get_install_opts() {
    while getopts 'nd:' cmdopt; do
	case $cmdopt in
	    n)
		dryrun="yes"
		;;
	    d)
		install_dir=$OPTARG
		;;
	    *)
		usage_install
		exit 1
		;;
	esac
    done
}

# install -d DIR [-n]
# Fetch and extract a FreeBSD base system into DIR (which must exist and
# be empty), then bring it up to date with freebsd-update(8).  With -n,
# only the target-directory validation runs.
install() {
    get_install_opts "$@"
    if [ -z "$install_dir" ]; then
	usage_install
	exit 1
    fi
    if [ ! -d "$install_dir" ]; then
	>&2 echo "Error: $install_dir is missing"
	exit 1
    fi
    # Refuse a non-empty target, with a friendlier message when it looks
    # like FreeBSD is already installed there.
    if [ -n "$(ls -A "$install_dir")" ]; then
	if [ -f "$install_dir/bin/freebsd-version" ]; then
	    >&2 echo "Error: FreeBSD appears to already be installed in $install_dir"
	else
	    >&2 echo "Error: $install_dir must be empty"
	fi
	exit 1
    fi

    if [ -n "$dryrun" ]; then
	return
    fi

    # Release is overridable via BUPWUT_RELEASE; the default matches the
    # previously hard-coded 13.2-RELEASE.
    release=${BUPWUT_RELEASE:-13.2-RELEASE}
    fetch -o - "https://download.freebsd.org/releases/amd64/$release/base.txz" | tar -C "$install_dir" -xz
    PAGER=cat freebsd-update -b "$(realpath "$install_dir")" -d "$(realpath "$install_dir/var/db/freebsd-update")" --not-running-from-cron fetch install
}


# Dispatch to the selected subcommand, forwarding the remaining
# arguments verbatim ("$@" preserves argument boundaries; the original
# unquoted $@ re-split arguments containing whitespace).
case $command in
    compare)
	compare "$@"
	;;
    install)
	install "$@"
	;;
    *)
	usage
	exit 1
	;;
esac

Added scraps/bupwut/test_compare.sh.





























































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
#!/bin/sh
# Integration tests for "bupwut compare".  Each case rebuilds a pair of
# trees, mutates current/, and checks the paths compare reports.
set -ex

# Recreate identical base/ and current/ trees for the next test case.
setup() {
    rm -rf base current
    mkdir base current
    echo 'this is bar' > base/bar
    echo 'this is baz' > base/baz
    echo 'this is foo' > base/foo
    cp base/* current/
}

# files match
setup
test "$(bupwut compare -b base -c current)" = ""

# files are changed
setup
echo 'no longer bar' > current/bar
echo 'no longer foo' > current/foo
test "$(bupwut compare -b base -c current)" = "$(printf "bar\nfoo")"

# new and deleted files
setup
rm current/baz
# (the original redundantly touched current/qux a second time)
touch current/foozle current/qux
test "$(bupwut compare -b base -c current)" = "$(printf "foozle\nqux")"

# files in dir
setup
mkdir -p current/subdir
touch current/subdir/foo
test "$(bupwut compare -b base -c current)" = "$(printf "subdir/foo")"

# freebsd filter off
setup
mkdir current/modules
touch current/modules/foo.ko
test "$(bupwut compare -b base -c current)" = "modules/foo.ko"

# freebsd filter on
setup
mkdir current/modules
touch current/modules/foo.ko
test "$(bupwut compare -b base -c current -F)" = ""

Added scraps/bupwut/test_install.sh.













































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
#!/bin/sh
# Integration tests for "bupwut install" target validation.  All runs
# use -n (dry run) so nothing is actually fetched or installed.
set -e

# dir doesn't exist
# Temporarily drop -e so we can capture the expected failure status.
set +e
bupwut install -n -d base
result=$?
set -e
test $result -eq 1

# dir exist but has files
mkdir base
touch base/foo
set +e
bupwut install -n -d base
result=$?
set -e
test $result -eq 1

# success
# An existing empty directory is a valid install target.
rm base/*
bupwut install -n -d base

Added scraps/gauntlet-old/Kyuafile.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
syntax(2)

test_suite('gauntlet')

include('go/Kyuafile')
include('ref_cpl/Kyuafile')
include('sh/Kyuafile')

Added scraps/gauntlet-old/Makefile.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
DIRS=	go \
	ref_cpl

NOOPS=	_build

.PHONY: ${DIRS}

.for d in ${DIRS}
all: ${d}
${d}:
	cd ${d} && ${MAKE}

clean: clean-${d}
clean-${d}:
	cd ${d} && ${MAKE} clean
.endfor

.include "gauntlet.mk"

Added scraps/gauntlet-old/exp-ls/Kyuafile.













>
>
>
>
>
>
1
2
3
4
5
6
syntax(2)

test_suite('gauntlet')

atf_test_program{name='hello.sh'}
atf_test_program{name='goodbye.sh'}

Added scraps/gauntlet-old/exp-ls/goodbye.sh.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
#! /usr/bin/env atf-sh
set -e

TESTDIR=/usr/home/patmaddox/freebsd-releng-13.2/usr/tests
PATH=$(realpath $(atf_get_srcdir))/goodbye:$PATH

. $TESTDIR/bin/ls/ls_tests

Added scraps/gauntlet-old/exp-ls/goodbye/ls.





>
>
1
2
#!/bin/sh
echo BYE BYE

Added scraps/gauntlet-old/exp-ls/hello.sh.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
#! /usr/bin/env atf-sh
set -e

TESTDIR=/usr/home/patmaddox/freebsd-releng-13.2/usr/tests
PATH=$(realpath $(atf_get_srcdir))/hello:$PATH

. $TESTDIR/bin/ls/ls_tests

Added scraps/gauntlet-old/exp-ls/hello/ls.





>
>
1
2
#!/bin/sh
echo hello, world

Added scraps/gauntlet-old/gauntlet.mk.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
.PHONY: all test
.MAIN: all

.for n in ${NOOPS}
${n}:
	@true
.endfor

.if !target(_build)
_build:
	@mkdir -p _build
.endif

test: all
	kyua test

.if !target(clean)
clean:
	rm -rf _build
.endif

Added scraps/gauntlet-old/gauntlet.sh.



>
1
. $GAUNTLETDIR/gauntlet_cpl.sh

Added scraps/gauntlet-old/gauntlet_cpl.sh.





































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
# Programs from The C Programming Language, second edition
# Numeric prefix indicates the page number where the program is defined
# Reference implementations are in $GAUNTLETDIR/ref_cpl

## test cases
atf_init_test_cases() {
    atf_add_test_case 006_hello
    atf_add_test_case 009_fahr_cels_int
    atf_add_test_case 012_fahr_cels_float
    atf_add_test_case 016_cpio
    atf_add_test_case 018_countc
    atf_add_test_case 019_countl
    atf_add_test_case 020_countw
}

## 006_hello
atf_test_case 006_hello

006_hello_body() {
    require_prog HELLO
    atf_check -s exit:0 -o inline:"hello, world\n" $HELLO
}

## 009_fahr_cels_int
atf_test_case 009_fahr_cels_int

009_fahr_cels_int_body() {
    require_prog FAHR_CELS_INT

    cat > expected <<EOF
0	-17
20	-6
40	4
60	15
80	26
100	37
120	48
140	60
160	71
180	82
200	93
220	104
240	115
260	126
280	137
300	148
EOF

    atf_check -o file:expected $FAHR_CELS_INT
}

## 012_fahr_cels_float
atf_test_case 012_fahr_cels_float

012_fahr_cels_float_body() {
    require_prog FAHR_CELS_FLOAT

    cat > expected <<EOF
  0  -17.8
 20   -6.7
 40    4.4
 60   15.6
 80   26.7
100   37.8
120   48.9
140   60.0
160   71.1
180   82.2
200   93.3
220  104.4
240  115.6
260  126.7
280  137.8
300  148.9
EOF

    atf_check -o file:expected $FAHR_CELS_FLOAT
}

## 016_cpio
atf_test_case 016_cpio

016_cpio_body() {
    require_prog CPIO

    cat > data <<EOF
foo
bar
123
EOF

    atf_check -o file:data $CPIO < data
}

## 018_countc
atf_test_case 018_countc

018_countc_body() {
    require_prog COUNTC

    echo "foo" > data

    atf_check -o inline:"4\n" $COUNTC < data
}

## 019_countl
atf_test_case 019_countl

019_countl_body() {
    require_prog COUNTL

    cat > data <<EOF
foo
bar
baz
EOF

    atf_check -o inline:"3\n" $COUNTL < data
}

## 020_countw
atf_test_case 020_countw

020_countw_body() {
    require_prog COUNTW

    cat > data <<EOF
foo bar baz
I like to eat apples
figs not so much
EOF

    lines=$(wc -l data | cut -w -f 2)
    words=$(wc -w data | cut -w -f 2)
    chars=$(wc -c data | cut -w -f 2)

    atf_check -o inline:"$lines $words $chars\n" $COUNTW < data
}

# require_prog VARNAME
# Skip the current atf test case unless the driver script has set the
# named variable to a program path.  The eval performs an indirect
# lookup of the variable whose name is in $1.
require_prog()
{
    eval "prog=\$$1"
    if [ -z "$prog" ]; then
	atf_skip "must set $1 command"
    fi
}

Added scraps/gauntlet-old/go/Kyuafile.











>
>
>
>
>
1
2
3
4
5
syntax(2)

test_suite('gauntlet_go')

atf_test_program{name='gauntlet.sh'}

Added scraps/gauntlet-old/go/Makefile.













>
>
>
>
>
>
1
2
3
4
5
6
all: _build _build/hello

_build/hello: hello.go
	go build -o ${.TARGET} ${.ALLSRC}

.include "../gauntlet.mk"

Added scraps/gauntlet-old/go/gauntlet.sh.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
#! /usr/bin/env atf-sh
set -e

GAUNTLETDIR=$(atf_get_srcdir)/..
BUILD=$(atf_get_srcdir)/_build

HELLO="$BUILD/hello"

. $GAUNTLETDIR/gauntlet.sh

Added scraps/gauntlet-old/go/hello.go.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
package main

import "fmt"

func main() {
  fmt.Println("hello, world")
}

Added scraps/gauntlet-old/ref_cpl/Kyuafile.











>
>
>
>
>
1
2
3
4
5
syntax(2)

test_suite('gauntlet_ref_cpl')

atf_test_program{name='gauntlet.sh'}

Added scraps/gauntlet-old/ref_cpl/Makefile.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
PROGS=	hello \
	fahr_cels_int \
	fahr_cels_float \
	cpio \
	countc \
	countl \
	countw

all: _build

.for p in ${PROGS}
all: _build/${p}
_build/${p}: ${p}.c
	cc -o ${.TARGET} ${.ALLSRC}
.endfor

.include "../gauntlet.mk"

Added scraps/gauntlet-old/ref_cpl/countc.c.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
/* K&R2 p.18 reference implementation: count characters on stdin and
 * print the total. */
#include <stdio.h>

int main()
{
  long nc;

  nc = 0;
  /* getchar() returns EOF at end of input. */
  while (getchar() != EOF)
    ++nc;
  printf("%ld\n", nc);
}

Added scraps/gauntlet-old/ref_cpl/countl.c.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
/* K&R2 p.19 reference implementation: count newline characters on
 * stdin and print the total. */
#include <stdio.h>

int main()
{
  int c, nl;

  nl = 0;
  while ((c = getchar()) != EOF)
    if (c == '\n')
      ++nl;
  printf("%d\n", nl);
}

Added scraps/gauntlet-old/ref_cpl/countw.c.

















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
/* K&R2 p.20 reference implementation: count lines, words, and
 * characters on stdin (wc-like). */
#include <stdio.h>

/* Word-state values: currently inside or outside a word. */
#define IN  1
#define OUT 0

int main()
{
  int c, nl, nw, nc, state;

  state = OUT;
  nl = nw = nc = 0;
  while ((c = getchar()) != EOF) {
    ++nc;
    if (c == '\n')
      ++nl;
    if (c == ' ' || c == '\n' || c == '\t')
      state = OUT;
    else if (state == OUT) {
      /* First non-blank after blanks starts a new word. */
      state = IN;
      ++nw;
    }
  }
  printf("%d %d %d\n", nl, nw, nc);
}

Added scraps/gauntlet-old/ref_cpl/cpio.c.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
/* K&R2 p.16 reference implementation: copy stdin to stdout one
 * character at a time. */
#include <stdio.h>

int main()
{
  int c;

  c = getchar();
  while (c != EOF) {
    putchar(c);
    c = getchar();
  }
}

Added scraps/gauntlet-old/ref_cpl/fahr_cels_float.c.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
/* K&R2 p.12 reference implementation: Fahrenheit-Celsius table,
 * floating-point version (0..300 in steps of 20). */
#include <stdio.h>

int main()
{
  float fahr, celsius;
  int lower, upper, step;

  lower = 0;
  upper = 300;
  step = 20;

  fahr = lower;
  while (fahr <= upper) {
    celsius = (5.0 / 9.0) * (fahr - 32.0);
    /* Right-aligned columns: width 3 with no decimals, width 6 with one. */
    printf("%3.0f %6.1f\n", fahr, celsius);
    fahr = fahr + step;
  }
}

Added scraps/gauntlet-old/ref_cpl/fahr_cels_int.c.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
/* K&R2 p.9 reference implementation: Fahrenheit-Celsius table using
 * integer arithmetic (result truncates toward zero). */
#include <stdio.h>

int main()
{
  int fahr, celsius;
  int lower, upper, step;

  lower = 0;
  upper = 300;
  step = 20;

  fahr = lower;
  while (fahr <= upper) {
    /* Multiply before dividing to avoid losing all precision. */
    celsius = 5 * (fahr-32) / 9;
    printf("%d\t%d\n", fahr, celsius);
    fahr = fahr + step;
  }
}

Added scraps/gauntlet-old/ref_cpl/gauntlet.sh.













































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
#! /usr/bin/env atf-sh
set -e

GAUNTLETDIR=$(atf_get_srcdir)/..
BUILD=$(atf_get_srcdir)/_build

PROGS="
hello
fahr_cels_int
fahr_cels_float
cpio
countc
countl
countw
"

for p in $PROGS; do
    progname=$(echo "$p" | tr '[:lower:]' '[:upper:]')
    eval "$progname=$BUILD/$p"
done

. $GAUNTLETDIR/gauntlet_cpl.sh

Added scraps/gauntlet-old/ref_cpl/hello.c.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
/* K&R2 p.6 reference implementation: hello, world. */
#include <stdio.h>

int main()
{
  printf("hello, world\n");
  return 0;
}

Added scraps/gauntlet-old/sh/Kyuafile.











>
>
>
>
>
1
2
3
4
5
syntax(2)

test_suite('gauntlet_sh')

atf_test_program{name='gauntlet.sh'}

Added scraps/gauntlet-old/sh/gauntlet.sh.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
#! /usr/bin/env atf-sh
set -e

GAUNTLETDIR=$(atf_get_srcdir)/..

HELLO=$(atf_get_srcdir)/hello.sh

. $GAUNTLETDIR/gauntlet.sh

Added scraps/gauntlet-old/sh/hello.sh.





>
>
1
2
#!/bin/sh
echo hello, world

Added scraps/mksite/.gitignore.





>
>
1
2
out
paths

Added scraps/mksite/Makefile.























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
INFILES != find src -type f -name '*.md'

.PHONY: all clean rebuild test
.MAIN: all

all:

clean:
	rm -rf out

rebuild: clean .WAIT all

.for f in ${INFILES}
out_${f} != ./bin/mksite.sh getoutpath ${f}

${out_${f}}: ${f}
	./bin/mksite.sh export ${f} ${.TARGET}

all: ${out_${f}}

.for l in ${:! ./bin/mksite.sh getlinks $f !}
$l: $f
.endfor
.endfor

make test:
	cd test && kyua test

Added scraps/mksite/README.md.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
# mksite

Making a site shouldn't be so hard.
There are all these static site generators out there.
They want to be all things to all people.
I just need some things for me.

## TODO

- [ ] add tests
- [ ] verify links
- [ ] benchmark test: lots of pages
- [ ] navigate links in editor

Added scraps/mksite/bin/mksite.sh.















































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
#!/bin/sh

# mksite_export SRCFILE OUTFILE
# Render one Markdown source file to HTML: strip the frontmatter block,
# rewrite "(%id%)" placeholders into relative HTML links, then pipe the
# result through markdown(1) into OUTFILE.
mksite_export()
{
    mypath=$(mksite_getoutpath ${1})
    mydir=$(dirname $mypath)
    if [ ! -d $mydir ]; then mkdir -p $mydir; fi

    # Every numeric link id referenced by this page.
    ids=$(cat $1 | grep -o '(%[[:digit:]]*%)' | grep -o '[[:digit:]]*' | sort -u)
    # Base sed program: delete the "---"-delimited frontmatter block.
    replace="-e '/^---$/,/^---$/d'"

    for id in $ids; do
	file=$(find src -name "${id}-*.md")
	path=$(mksite_getoutpath $file)
	therelpath=$(mksite_relpath $mypath $path)
	# Append one substitution per id: (%id%) -> (relative/path.html)
	replace="${replace} -e 's;(%${id}%);(${therelpath});g'"
    done

    # eval is required so the single-quoted -e expressions accumulated
    # in $replace are re-parsed as separate sed arguments.
    eval "sed $replace" $1 | markdown -o $2
}

# mksite_getid FILE
# Print the numeric id prefix of a source filename ("123" from
# "path/to/123-title.md"); print nothing when there is no id prefix.
# Quotes added: the original unquoted $1/$id broke on paths containing
# whitespace.
mksite_getid()
{
    id=$(basename "$1" | sed -n 's/^\([[:digit:]]*\)-.*/\1/p')
    if [ -n "$id" ]; then
	echo "$id"
    fi
}

# mksite_getlinks FILE
# Print the output path of every source page that links to FILE via its
# "(%id%)" placeholder.  The Makefile uses this to add dependency edges
# so pages are re-rendered when a target's path changes.
mksite_getlinks()
{
    file=$1
    id=$(mksite_getid $file)
    links=$(grep -rl "(%${id}%)" src | sort -u) # | grep -o '[[:digit:]]*')

    for link in $links; do
	echo $(mksite_getoutpath $link)
    done
}

# mksite_getoutpath FILE
# Print the output HTML path for a Markdown source file.  A "path:"
# entry in the frontmatter wins; otherwise the path mirrors src/ under
# out/ with the numeric id prefix and the .md suffix stripped.
# Quotes added throughout: the original's unquoted [ -z $fmpath ] and
# file expansions misbehaved on values containing whitespace.
mksite_getoutpath()
{
    file=$1
    # Frontmatter is the block delimited by "---" lines at the top.
    fmpath=$(sed -n '/---/,/---/p' "$file" | grep '^path:' | sed -e 's/^path: //')
    dir=$(echo "$file" | sed -e 's|^src/|out/|' | xargs dirname)
    base=$(basename "$file")

    if [ -z "$fmpath" ]; then
	file=$(echo "$base" | sed -e 's/^[[:digit:]]*-//' -e 's/.md$//')
	outpath=${dir}/${file}
    else
	outpath=out/${fmpath}
    fi

    echo "${outpath}.html"
}

# mksite_relpath FROM TO
# Print the path of TO relative to the directory of FROM.  Pure string
# computation; neither path needs to exist on disk.
mksite_relpath()
{
    source_file=$(basename $1)
    target_file=$(basename $2)

    source_dir=$(dirname $1)
    target_dir=$(dirname $2)

    # Walk up from the source dir until it is a prefix of the target
    # dir, emitting one "../" per step.  NOTE(review): ## treats
    # $common_part as a glob pattern here; paths containing glob
    # metacharacters would misbehave.
    common_part=$source_dir
    result=

    while [ "${target_dir##$common_part}" = $target_dir ]; do
	common_part=$(dirname $common_part)
	result=../$result
    done

    # Remainder of the target dir below the common prefix.
    forward_part=$(echo ${target_dir##$common_part} | sed 's|^/||')

    if [ -n "$result" ] && [ -n "$forward_part" ]; then
	result=$result$forward_part/
    elif [ -n "$forward_part" ]; then
	result=$forward_part/
    fi

    echo $result$target_file
}

# Entry point: first argument names the subcommand (export, getid,
# getlinks, getoutpath, relpath); the rest are passed through verbatim.
cmd=$1
shift
mksite_${cmd} "$@"

Added scraps/mksite/src/202303271300-page1.md.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
# First Page

This is my first page.

It links to [my page 2](%202303271400%). It likes it so much it [links it twice](%202303271400%).

Just for fun, [a third time](%202303271400%).

It also links to a file [in a subdir](%202303280131%).

Added scraps/mksite/src/202303271400-page2.md.





















>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
---
path: mypage2
---

# My Page 2

This page has a different path.
It is set in Markdown frontmatter.

It links to [page 1](%202303271300%).

Added scraps/mksite/src/3-foo.md.



>
1
Link to [page 2](%202303271400%)

Added scraps/mksite/src/subdir/202303280131-another-page.md.







>
>
>
1
2
3
# This is just another page

It links to [page 1](%202303271300%).

Added scraps/mksite/test/.gitignore.





>
>
1
2
/*/Makefile
/*/bin

Added scraps/mksite/test/Kyuafile.











>
>
>
>
>
1
2
3
4
5
syntax(2)

test_suite('mksite')

atf_test_program{name='test.sh'}

Added scraps/mksite/test/test.sh.







































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
#! /usr/bin/env atf-sh

## basic
atf_test_case test_basic

test_basic_head() {
}

test_basic_body() {
    copy_mksite test_basic
    atf_check -s exit:0 -o ignore -x "make -C $(atf_get_srcdir)/test_basic rebuild"
    atf_check -s exit:0 -o match:'<h1>Hello World</h1>' -x "cat $(atf_get_srcdir)/test_basic/out/index.html"
}

## no id
atf_test_case test_no_id

test_no_id_head() {
}

test_no_id_body() {
    copy_mksite test_no_id
    atf_check -s exit:0 -o match:'out\/no-id.html' -x "make -C $(atf_get_srcdir)/test_no_id rebuild"
    atf_check -s exit:0 -o match:'<h1>Hello World</h1>' -x "cat $(atf_get_srcdir)/test_no_id/out/no-id.html"
}

## custom path
atf_test_case test_custom_path

test_custom_path_head() {
}

test_custom_path_body() {
    copy_mksite test_custom_path
    atf_check -s exit:0 -o ignore -x "make -C $(atf_get_srcdir)/test_custom_path rebuild"
    atf_check -s exit:0 -o match:'<h1>Hello World</h1>' -o not-match:'foo' -x "cat $(atf_get_srcdir)/test_custom_path/out/foo/bar/baz.html"
}

## basic link
atf_test_case test_basic_link

test_basic_link_head() {
}

test_basic_link_body() {
    copy_mksite test_basic_link
    atf_check -s exit:0 -o ignore -x "make -C $(atf_get_srcdir)/test_basic_link rebuild"
    atf_check -s exit:0 -o match:'<a href="bar.html">link to bar</a>' -x "cat $(atf_get_srcdir)/test_basic_link/out/foo.html"
}

## relative link
atf_test_case test_relative_link

test_relative_link_head() {
}

test_relative_link_body() {
    copy_mksite test_relative_link
    atf_check -s exit:0 -o ignore -x "make -C $(atf_get_srcdir)/test_relative_link rebuild"
    atf_check -s exit:0 -o match:'<a href="../bar.html">link to bar</a>' -x "cat $(atf_get_srcdir)/test_relative_link/out/subdir/foo.html"
}

## update path
atf_test_case test_update_path

test_update_path_head() {
}

test_update_path_body() {
    copy_mksite test_update_path
    cp $(atf_get_srcdir)/test_update_path/2-bar.orig $(atf_get_srcdir)/test_update_path/src/2-bar.md

    atf_check -s exit:0 -o match:'out\/foo.html' -o match:'out\/bar.html' -o match:'out\/baz.html' -x "make -C $(atf_get_srcdir)/test_update_path rebuild"
    atf_check -s exit:0 -o match:'<a href="bar.html">link to bar</a>' -x "cat $(atf_get_srcdir)/test_update_path/out/foo.html"

    sleep 1 # I hate this
    printf '%b' '---\npath: new-bar\n---\n' > $(atf_get_srcdir)/test_update_path/src/2-bar.md
    cat $(atf_get_srcdir)/test_update_path/2-bar.orig >> $(atf_get_srcdir)/test_update_path/src/2-bar.md

    atf_check -s exit:0 -o match:'out\/foo.html' -o match:'out\/new-bar.html' -o not-match:'out\/baz.html' -x "make -C $(atf_get_srcdir)/test_update_path"
    atf_check -s exit:0 -o match:'<a href="new-bar.html">link to bar</a>' -x "cat $(atf_get_srcdir)/test_update_path/out/foo.html"
}

## test cases
atf_init_test_cases() {
    atf_add_test_case test_basic
    atf_add_test_case test_no_id
    atf_add_test_case test_custom_path
    atf_add_test_case test_basic_link
    atf_add_test_case test_relative_link
    atf_add_test_case test_update_path
}

## helpers
copy_mksite() {
    if [ ! -d $(atf_get_srcdir)/${1}/bin ]; then mkdir $(atf_get_srcdir)/${1}/bin; fi
    cp $(atf_get_srcdir)/../Makefile $(atf_get_srcdir)/${1}
    cp $(atf_get_srcdir)/../bin/mksite.sh $(atf_get_srcdir)/${1}/bin/
}

Added scraps/mksite/test/test_basic/src/0-index.md.



>
1
# Hello World

Added scraps/mksite/test/test_basic_link/src/1-foo.md.



>
1
[link to bar](%2%)

Added scraps/mksite/test/test_basic_link/src/2-bar.md.



>
1
this is bar

Added scraps/mksite/test/test_custom_path/src/1-index.md.











>
>
>
>
>
1
2
3
4
5
---
path: foo/bar/baz
---

# Hello World

Added scraps/mksite/test/test_no_id/src/no-id.md.



>
1
# Hello World

Added scraps/mksite/test/test_relative_link/src/2-bar.md.



>
1
this is bar

Added scraps/mksite/test/test_relative_link/src/subdir/1-foo.md.



>
1
[link to bar](%2%)

Added scraps/mksite/test/test_update_path/2-bar.orig.



>
1
this is bar

Added scraps/mksite/test/test_update_path/src/1-foo.md.



>
1
[link to bar](%2%)

Added scraps/mksite/test/test_update_path/src/2-bar.md.









>
>
>
>
1
2
3
4
---
path: new-bar
---
this is bar

Added scraps/mksite/test/test_update_path/src/3-baz.md.



>
1
this is baz

Added scraps/zolatest/.gitignore.



>
1
public

Added scraps/zolatest/Justfile.











>
>
>
>
>
1
2
3
4
5
help:
  @just -l

serve:
  zola serve

Added scraps/zolatest/config.toml.

































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
# The URL the site will be built for
base_url = "http://patzola.test"

# Whether to automatically compile all Sass files in the sass directory
compile_sass = true

# Whether to build a search index to be used later on by a JavaScript library
build_search_index = true

[markdown]
# Whether to do syntax highlighting
# Theme can be customised by setting the `highlight_theme` variable to a theme supported by Zola
highlight_code = true

[extra]
# Put all your custom variables here

Added scraps/zolatest/content/adraft.md.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
+++
title = "a draft"
draft = true
+++

This page is a draft.
It will not be seen.

It links to [published](@/published.md).

Added scraps/zolatest/content/blog/_index.md.





















>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
+++
title = "Blog home page"
sort_by = "date"
template = "blog.html"
page_template = "blog-page.html"
+++

This is my blog index page.
I don't actually want a blog.
I'm just following the zola tutorial.

Added scraps/zolatest/content/blog/first.md.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
+++
title = "First blog post"
date = 2023-03-22
+++

This is my first blog post.
Like I said, I don't want a blog.
It's nice that I can learn Zola from the ground up though.

Added scraps/zolatest/content/blog/ideas.md.

















































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
+++
title = "Ideas"
date = 2023-03-24
+++

## just: add porcelain to your plumbing

I'm sensing a good way to work with just.
Write scripts, add them to bin directory.
Call them from just.

It may seem like overkill at first.
It adds consistency to the project.
I can go into any folder and type `just`.
It will print out the main commands I need to know.
I don't need a README just to get going.

Comes from the git idea of plumbing and porcelain.

## Thinkpad E495

I really like it so far.
The only thing I don't like is the screen isn't bright enough for me.
I don't know if that's a limitation of the screen, or if FreeBSD isn't configured correctly.
I believe I have it at 100% brightness, and it's just a tad dim.
Everything else works great so far though, so I will most likely keep it.
I wonder if it's possible to get a new panel?
Also I need to figure out how to disable / soften the beep.
It's loud and stupid.

It's been a couple days now, and it's working great.
Mostly.
It does appear that the battery drained 10% while in hibernate mode for the day, which seems like a lot.
The keyboard is pretty clackey... I kind of like it, but I could see how it could get annoying.

It's still just not quite bright enough.
I've ordered an X1 Carbon Gen 9, which is more powerful, and brighter.
The tradeoff there is that it doesn't have an ethernet port, so I will always need a USB dongle.

The E495 is super quiet.
I never hear the fan when browsing.
I've heard it lightly when building the kernel.

## git commit prefix

The FreeBSD monorepos use a prefix to indicate which subsystem a commit refers to.
I've been doing the same thing with my monorepo.
I wonder if I could script it?

## auto-shutdown package

I want to have a beefy build server in the cloud.
I don't want to pay for it all the time, only when it's building.
I don't want to accidentally leave it running.
Create a cron job that checks to see if poudriere has been active for some period of time, and shuts down otherwise.
Maybe I could even just have an instance that runs via Cirrus, but attaches the poudriere disk.

Added scraps/zolatest/content/blog/second.md.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
+++
title = "Second blog post"
date = 2023-03-23
+++

This is my second blog post.
I just copied it from the first.

Added scraps/zolatest/content/published.md.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
+++
title = "published"
+++

This page is published.
How about that?

It doesn't link to adraft... yet.

Added scraps/zolatest/templates/base.html.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="utf-8">
    <title>My Zola Test Site</title>
  </head>

  <body>
    <div>
      {% block content %} {% endblock %}
    </div>
  </body>
</html>

Added scraps/zolatest/templates/blog-page.html.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
{% extends "base.html" %}

{% block content %}
<h1>{{ page.title }}</h1>
<p><strong>{{ page.date }}</strong></p>
{{ page.content | safe }}
{% endblock content %}

Added scraps/zolatest/templates/blog.html.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
{% extends "base.html" %}

{% block content %}
<h1>{{ section.title }}</h1>
{{ section.content | safe }}
<ul>
  {% for page in section.pages %}
  <li><a href="{{ page.permalink | safe }}">{{ page.title }}</a></li>
  {% endfor %}
</ul>
{% endblock content %}

Added scraps/zolatest/templates/index.html.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
{% extends "base.html" %}

{% block content %}
<h1>My Zola Test Index</h1>
<p>
  This is my Zola test.
  I have a <a href="{{ get_url(path='@/blog/_index.md') }}">blog</a>.
</p>
<p>
  <a href="https://www.getzola.org/documentation/themes/installing-and-using-themes/">Zola themes documentation</a>
</p>
{% endblock content %}

Added scraps/zolatest/templates/page.html.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
{% extends "base.html" %}

{% block content %}
<h1>{{ page.title }}</h1>

{{ page.content | safe }}
{% endblock content %}

Added scratch/crystal-enum-pattern-matching/Makefile.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
.PHONY: all

all: _build/enum _build/types

_build/enum: enum.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} enum.cr

_build/types: types.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} types.cr

Added scratch/crystal-enum-pattern-matching/enum.cr.































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
# using enums appears to let some values be missed
# (i.e. no else statement is required)

require "option_parser"

enum Command
  Get
  Put
  Delete
  Unknown
end

command = Command::Unknown

OptionParser.parse do |parser|
  parser.on("get", "get") { command = Command::Get }
  parser.on("put", "put") { command = Command::Put }
  parser.on("del", "delete") { command = Command::Delete }
end

case command
in Command::Get
  puts "get"
in Command::Put
  puts "put"
in Command::Delete
  puts "delete"
in Command::Unknown
  STDERR.puts "E: unknown"
  exit 1
end

Added scratch/crystal-enum-pattern-matching/types.cr.





































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
# using classes appears to require that all types are handled

require "option_parser"

class Get; end
class Put; end
class Delete; end
class Unknown; end

command = Unknown.new

OptionParser.parse do |parser|
  parser.on("get", "get") { command = Get.new }
  parser.on("put", "put") { command = Put.new }
  parser.on("del", "delete") { command = Delete.new }
end

def run(c : Get)
  puts "get"
end

def run(c : Put)
  puts "put"
end

def run(c : Delete)
  puts "delete"
end

def run(c : Unknown)
  STDERR.puts "E: unknown"
end

run(command)

Added scratch/valgrind-test/Makefile.





































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
.PHONY: all clean valgrind impls
IMPL?=	c \
	c-heap \
	c-leak \
	crystal \
	go \
	rust \
	d

PROGS=	${IMPL:S/^/_build\/hello-/g}
CFLAGS+=	-Wall -Wextra -Werror

all: ${PROGS}

clean:
	rm -rf _build

impls:
	@echo ${IMPL} | tr ' ' '\n'

valgrind: all
	for p in ${PROGS}; do valgrind --leak-check=full ./$$p; done

_build/hello-c: hello.c
	@mkdir -p ${.TARGET:H}
	cc ${CFLAGS} -o ${.TARGET} ${.ALLSRC}

_build/hello-c-heap: hello-heap.c
	@mkdir -p ${.TARGET:H}
	cc ${CFLAGS} -o ${.TARGET} ${.ALLSRC}

_build/hello-c-leak: hello-leak.c
	@mkdir -p ${.TARGET:H}
	cc ${CFLAGS} -o ${.TARGET} ${.ALLSRC}

_build/hello-crystal: hello.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} ${.ALLSRC}

_build/hello-go: hello.go
	@mkdir -p ${.TARGET:H}
	go build -o ${.TARGET} ${.ALLSRC}

_build/hello-rust: hello.rs
	@mkdir -p ${.TARGET:H}
	rustc -o ${.TARGET} ${.ALLSRC}

_build/hello-d: hello.d
	@mkdir -p ${.TARGET:H}
	ldc2 -of ${.TARGET} ${.ALLSRC}

Added scratch/valgrind-test/hello-heap.c.





















>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
#include <stdio.h>
#include <string.h>
#include <stdlib.h>

int main() {
  char *hello = "hello world\n";
  char *str = strdup(hello);
  printf("%s", str);
  free(str);
}

Added scratch/valgrind-test/hello-leak.c.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
#include <stdio.h>
#include <string.h>
#include <stdlib.h>

int main() {
  char *hello = "hello world\n";
  char *str = strdup(hello);
  printf("%s", str);
}

Added scratch/valgrind-test/hello.c.











>
>
>
>
>
1
2
3
4
5
#include <stdio.h>

int main() {
  printf("hello world\n");
}

Added scratch/valgrind-test/hello.cr.



>
1
puts "hello world"

Added scratch/valgrind-test/hello.d.













>
>
>
>
>
>
1
2
3
4
5
6
import std.stdio;

void main()
{
  writeln("hello world");
}

Added scratch/valgrind-test/hello.go.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
package main

import "fmt"

func main() {
	fmt.Println("hello world")
}

Added scratch/valgrind-test/hello.rs.







>
>
>
1
2
3
fn main() {
    println!("hello world");
}

Deleted share/examples/bsd-prog/Makefile.in.

1
2
3
4
5
6
PROG=		%%PROG%%
CFLAGS+=	-Wall -Werror -Wextra -pedantic
MK_MAN=		no
#LDADD=		-lm

.include <bsd.prog.mk>
<
<
<
<
<
<












Deleted share/examples/bsd-prog/bsd-prog.mk.

1
2
3
4
5
6
7
8
9
10
TEMPLATEDIR:=	${.PARSEDIR}

.PHONY: all

.ifndef PROG
.error must assign PROG var
.endif

all:
	sed -e 's|%%PROG%%|${PROG}|g' ${TEMPLATEDIR}/Makefile.in > Makefile
<
<
<
<
<
<
<
<
<
<




















Deleted share/examples/c-jail/Makefile.

1
2
3
4
5
6
7
8
9
10
11
.PHONY: run

run: _build/hello
	doas ./_build/hello

_build/hello: hello.c
	@mkdir -p _build
	cc -Wall -Werror -o ${.TARGET} ${.ALLSRC}

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<
<
<






















Deleted share/examples/c-jail/hello.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
#include <stdlib.h>
#include <unistd.h>
#include <sys/param.h>
#include <sys/types.h>
#include <sys/jail.h>
#include <stdio.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <errno.h>

int main() {
  struct in_addr addr;
  if(!inet_aton("192.168.3.81", &addr)) {
    exit(69);
  }

  struct jail j = {
    JAIL_API_VERSION,
    "/tmp/jroot",
    "chicken",
    "chicken",
    1,
    0,
    &addr,
    NULL
  };

  char *path = malloc(sizeof(char) * 128);
  getcwd(path, 128);
  printf("path outside jail: %s\n", path);

  int jid = jail(&j);
  if(jid >= 0) {
    printf("started jail jid: %i\n", jid);
  } else {
    printf("error: (%i)\n", errno);
    exit(1);
  }

  getcwd(path, 128);
  printf("path in jail: %s\n", path);

  while(1) {
    sleep(3);
  }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























































































Deleted share/examples/crystal-enum-pattern-matching/Makefile.

1
2
3
4
5
6
7
8
9
10
11
.PHONY: all

all: _build/enum _build/types

_build/enum: enum.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} enum.cr

_build/types: types.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} types.cr
<
<
<
<
<
<
<
<
<
<
<






















Deleted share/examples/crystal-enum-pattern-matching/enum.cr.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
# using enums appears to let some values be missed
# (i.e. no else statement is required)

require "option_parser"

enum Command
  Get
  Put
  Delete
  Unknown
end

command = Command::Unknown

OptionParser.parse do |parser|
  parser.on("get", "get") { command = Command::Get }
  parser.on("put", "put") { command = Command::Put }
  parser.on("del", "delete") { command = Command::Delete }
end

case command
in Command::Get
  puts "get"
in Command::Put
  puts "put"
in Command::Delete
  puts "delete"
in Command::Unknown
  STDERR.puts "E: unknown"
  exit 1
end
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























































Deleted share/examples/crystal-enum-pattern-matching/types.cr.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
# using classes appears to require that all types are handled

require "option_parser"

class Get; end
class Put; end
class Delete; end
class Unknown; end

command = Unknown.new

OptionParser.parse do |parser|
  parser.on("get", "get") { command = Get.new }
  parser.on("put", "put") { command = Put.new }
  parser.on("del", "delete") { command = Delete.new }
end

def run(c : Get)
  puts "get"
end

def run(c : Put)
  puts "put"
end

def run(c : Delete)
  puts "delete"
end

def run(c : Unknown)
  STDERR.puts "E: unknown"
end

run(command)
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































































Deleted share/examples/duckdb_cross_db_join.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
#!/bin/sh
set -e

work=$(mktemp -d -t duckdb-cross-db)
cd $work

cat <<EOF | duckdb
attach 'db1.db';
create table db1.ids (id integer primary key, name string not null);
insert into db1.ids values(1, 'pat');
insert into db1.ids values(2, 'edgar');

attach 'db2.db';
create table db2.traits (name string primary key, species string not null);
insert into db2.traits values('pat', 'human');
insert into db2.traits values('edgar', 'dog');

select ids.*, traits.species from db1.ids ids join db2.traits traits on ids.name=traits.name;
EOF

echo "DONE: $work"
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<










































Deleted share/examples/jail.conf.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
path = "/jails/${name}";
vnet;
$iface = "$name";
vnet.interface = "${iface}b";
host.hostname = "$name";
exec.clean;
exec.start = "sh /etc/rc";
exec.stop = "sh /etc/rc.shutdown jail";
exec.release = "ifconfig ${iface}a destroy";
enforce_statfs = 1;
devfs_ruleset = 6;
allow.mount;
allow.mount.devfs;
mount.devfs;

pdr_nginx {
  $ip = "192.168.2.3/24";
}

jailtest {
  exec.prepare = "~patmaddox/bin/jectl zroot/JAILS/$name boot";
  exec.prepare += "~patmaddox/bin/jlprep $name $ip $iface";
  $ip = "192.168.2.4/24";
}

porttest {
#  exec.prepare = "~patmaddox/bin/jectl zroot/JAILS/$name boot";
  exec.prepare += "~patmaddox/bin/jlprep $name $ip $iface";
  $ip = "192.168.2.5/24";
}

nginx_example {
  exec.prepare += "~patmaddox/bin/jlprep $name $ip $iface";
  $ip = "192.168.2.6/24";
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































Deleted share/examples/pf.conf.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
ext_if = "em0"
jail_if = "jails"
jail_net = $jail_if:network
bhyve_if = "bhyves"
bhyve_net = $bhyve_if:network

set skip on lo
scrub in

nat on $ext_if from $jail_net -> ($ext_if:0)
nat on $ext_if from $bhyve_net -> ($ext_if:0)

pass out

block in

pass in proto tcp to port { 22 }
pass in inet proto icmp icmp-type { echoreq }
pass from $jail_net
pass on $bhyve_if from $bhyve_net
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted share/examples/pkg-create/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
PKG = pkg-create-example-${VERSION}.pkg
VERSION= 1.0.0
PREFIX= /usr/local

.PHONY: all clean clean-stage

all: ${PKG}
clean: clean-stage
	rm -f ${PKG} pkg-plist manifest
clean-stage:
	rm -rf stage

SRC_FILES = ${:! find src -type f !}

${PKG}: clean-stage ${SRC_FILES}
	mkdir -p stage${PREFIX}
	cp -Rp src/ stage${PREFIX}/
	cat manifest.in | sed -e 's|%%VERSION%%|${VERSION}|' | sed -e 's|%%PREFIX%%|${PREFIX}|' > manifest
	echo ${:! find stage -type f !:C/^stage//} | tr ' ' '\n' > pkg-plist
	pkg create -M manifest -r stage -p pkg-plist
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted share/examples/pkg-create/manifest.in.

1
2
3
4
5
6
7
8
name: pkg-create-example
version: %%VERSION%%
origin: devel/pkg-create-example
comment: Example demonstrating how to create a package using pkg-create(8)
www: https://github.com/patmaddox/home/tree/main/examples/pkg-create-example
maintainer: pat@patmaddox.com
prefix: %%PREFIX%%
desc: Example demonstrating how to create a package using pkg-create(8)
<
<
<
<
<
<
<
<
















Deleted share/examples/pkg-create/src/share/pkg-create-example/bar.

1
hello bar
<


Deleted share/examples/pkg-create/src/share/pkg-create-example/foo.

1
hello foo
<


Deleted share/examples/pkg-create/src/share/pkg-create-example/subdir/baz.

1
hello baz
<


Deleted share/experiments/bhyve/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
VM=	newhost
VERS=	13.2-RELEASE
DOWNLOAD= https://download.freebsd.org/releases/amd64/${VERS}

.if ${VERS} == CURRENT
DOWNLOAD= https://download.freebsd.org/snapshots/amd64/15.0-CURRENT
.endif

.PHONY: new-vm dist

dist: dist/${VERS}-base.txz dist/${VERS}-kernel.txz

dist/${VERS}-base.txz:
	fetch -o ${.TARGET} ${DOWNLOAD}/base.txz

dist/${VERS}-kernel.txz:
	fetch -o ${.TARGET} ${DOWNLOAD}/kernel.txz

new-vm: dist
	./bhyve.sh new ${VM}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted share/experiments/bhyve/bhyve.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
#!/bin/sh
set -e

# to mount and access the disk on the host:
# mdconfig -a -t vnode -f guest.img
# zpool import -f -R /mnt/myguest -N 16143602688976745622 -t zguest
# zfs mount -a
# zpool export zguest
# mdconfig -d -u 0

# configure a new disk
# truncate -s 10G new.img
# mdconfig -a -t vnode -f new.img
# gpart create -s gpt /dev/md0
# gpart add -a 4k -t freebsd-zfs /dev/md0
# zpool create -m none -o altroot=/mnt/myguest -o autoexpand=on -O atime=off -t zguest zroot /dev/md0p1


# this relies on host networking and NAT
## /etc/rc.conf:
# ifconfig_bridge1_name="bhyves"
# ifconfig_bhyves="inet 192.168.3.1/24 up"
#
## /etc/pf.conf:
# bhyve_if = "bhyves"
# bhyve_net = $bhyve_if:network
# 
# set skip on lo
# scrub in
# 
# nat on $ext_if from $bhyve_net -> ($ext_if:0)
# 
# pass out
# 
# block in
# 
# pass in proto tcp to port { 22 }
# pass in inet proto icmp icmp-type { echoreq }
# pass on $bhyve_if from $bhyve_net
##

load_vmm()
{
    if ! kldstat | awk '{print $5}' | grep '^vmm.ko$'; then
	kldload vmm
    fi
}

_ifconfig()
{
    iface=tap-${1}
    if ! ifconfig $iface > /dev/null; then
	ifconfig tap create name $iface
    fi
    (ifconfig bhyves | grep "member: $iface" > /dev/null) || ifconfig bhyves addm $iface
}

usage()
{
cat<<EOF
Usage:
  ./bhyve.sh boot <vm>
  ./bhyve.sh new <vm>
  ./bhyve.sh mount <vm>
  ./bhyve.sh unmount <vm>
EOF
exit 1
}

boot()
{
    vm=$1
    img=vms/${vm}.img
    if [ ! -f $img ]; then
	echo "error: $img not found"
	exit 1
    fi
    load_vmm > /dev/null
    _ifconfig $vm
    bhyveload -c stdio -m 4096M -d $img $vm
    bhyve -c 4 -m 4096M -s 0:0,hostbridge -s 1:0,lpc -s 2:0,virtio-net,${iface} -s 3:0,virtio-blk,${img} -H -A -P -l com1,stdio $vm
}

new_vm()
{
    vm=$1
    img=vms/${vm}.img
    root=/mnt/bhyve-${vm}
    vers=${VERS:-13.2-RELEASE}
    base=dist/${vers}-base.txz
    kernel=dist/${vers}-kernel.txz

    if [ ! -f $base -o ! -f $kernel ]; then
	echo "error: make sure $base and $kernel are present"
	echo "suggestion: make dist"
	exit 1
    fi

    mkdir -p vms
    truncate -s 24G $img
    mdconfig -a -t vnode -f $img
    gpart create -s gpt /dev/md0
    gpart add -a 4k -t freebsd-zfs /dev/md0
    zpool create -m / -o altroot=${root} -o autoexpand=on -O atime=off -t bhyve-${vm} zroot /dev/md0p1
    tar -C $root -xzf $base
    tar -C $root -xzf $kernel
    touch ${root}/etc/fstab
    sysrc -f ${root}/etc/rc.conf hostname="${vm}"
    sysrc -f ${root}/etc/rc.conf zfs_enable="YES"
    sysrc -f ${root}/boot/loader.conf zfs_load="YES"
    unmount $vm
}

unmount()
{
    vm=$1
    zpool export bhyve-${vm}
    md=$(mdconfig -l -f vms/${vm}.img | sed -e 's/^md//')
    mdconfig -d -u $md
    rmdir /mnt/bhyve-${vm}
}

_mount()
{
    vm=$1
    img=vms/${vm}.img
    if [ ! -f $img ]; then
	echo "error: $img not found"
	exit 1
    fi
    md=$(mdconfig -a -f $img)
    zid=$(zpool import -d /dev/${md}p1 | grep '^[[:space:]]* id: [[:digit:]]*' | awk '{print $2}')
    zpool import -f -R /mnt/bhyve-${vm} -t $zid bhyve-${vm}
}

case $1 in
    boot)
	if [ -z "$2" ]; then usage; fi
	boot $2
	;;
    new)
	if [ -z "$2" ]; then usage; fi
	new_vm $2
	;;
    mount)
	if [ -z "$2" ]; then usage; fi
	_mount $2
	;;
    unmount)
	if [ -z "$2" ]; then usage; fi
	unmount $2
	;;
    *)
	usage
	;;
esac
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
























































































































































































































































































































Deleted share/experiments/bhyve/bin/boot.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
#!/bin/sh

vm=$1
iface=tap-${vm}
img=vms/${vm}.img

if [ ! -f $img ]; then
    echo "error: $img not found"
    exit 1
fi

bhyve_exit=0

while [ $bhyve_exit -eq 0 ]; do
    bhyve -c 4 -m 4096M \
	  -s 0:0,hostbridge \
	  -s 1:0,lpc \
	  -s 2:0,virtio-net,${iface} \
	  -s 3:0,virtio-blk,${img} \
	  -H -A -P \
	  -l com1,stdio \
	  -l bootrom,/usr/local/share/uefi-firmware/BHYVE_UEFI.fd \
	  $vm
    bhyve_exit=$?
done
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































Deleted share/experiments/bhyve/bin/bridge.sh.

1
2
3
4
5
#!/bin/sh
set -e
if ! ifconfig $1 > /dev/null ; then
    ifconfig bridge create inet $2 name $1 up
fi
<
<
<
<
<










Deleted share/experiments/bhyve/bin/config.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
#!/bin/sh

rc=${1}-rc.conf
root=/mnt/bhyve-${1}
vm_rc=${root}/etc/rc.conf

if [ ! -f $rc ]; then
   echo "error: missing $rc"
   exit 1
fi

sysrc -f ${root}/boot/loader.conf zfs_load="YES"

sysrc -f ${vm_rc} zfs_enable="YES"
commands=$(sed -e "s|^|sysrc -f $vm_rc |" $rc)
eval "$commands"
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
































Deleted share/experiments/bhyve/bin/mount.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
#!/bin/sh

vm=$1
img=vms/${vm}.img

if [ ! -f $img ]; then
    echo "error: $img not found"
    exit 1
fi

md=$(mdconfig -a -f $img)
zid=$(zpool import -d /dev/${md}p1 | grep '^[[:space:]]* id: [[:digit:]]*' | awk '{print $2}')
zpool import -f -R /mnt/bhyve-${vm} -t $zid bhyve-${vm}
<
<
<
<
<
<
<
<
<
<
<
<
<


























Deleted share/experiments/bhyve/bin/new-vm.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
#!/bin/sh
set -e

vm=$1
img=vms/${vm}.img
root=/mnt/bhyve-${vm}
boot=${root}-boot
vers=${VERS:-13.2-RELEASE}
base=dist/${vers}-base.txz
kernel=dist/${vers}-kernel.txz

if [ ! -f $base -o ! -f $kernel ]; then
    echo "error: make sure $base and $kernel are present"
    echo "suggestion: make dist"
    exit 1
fi

mkdir -p vms
truncate -s 24G $img

md=$(mdconfig -a -t vnode -f $img)
disk=/dev/${md}

# boot partition
gpart create -s gpt $disk
gpart add -a 4k -s 40M -t efi $disk
newfs_msdos -F 32 -c 1 ${disk}p1

# root partition
gpart add -a 4k -t freebsd-zfs $disk
zpool create -m / -o altroot=${root} -o autoexpand=on -O atime=off -t bhyve-${vm} zroot ${disk}p2
tar -C $root -xzf $base
tar -C $root -xzf $kernel
touch ${root}/etc/fstab

# copy boot loader
mount -t msdosfs -o longnames ${disk}p1 $boot
mkdir -p ${boot}/EFI/BOOT
cp ${root}/boot/loader.efi ${boot}/EFI/BOOT/BOOTX64.efi
umount $boot
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































































Deleted share/experiments/bhyve/bin/tap.sh.

1
2
3
4
5
6
7
8
9
10
11
#!/bin/sh
set -e

iface=tap-${1}
bridge=$2

if ! ifconfig $iface > /dev/null; then
    ifconfig tap create name $iface
fi

(ifconfig $bridge | grep "member: $iface" > /dev/null) || ifconfig $bridge addm $iface
<
<
<
<
<
<
<
<
<
<
<






















Deleted share/experiments/bhyve/bin/unmount.sh.

1
2
3
4
5
6
7
#!/bin/sh

vm=$1
zpool export bhyve-${vm}
md=$(mdconfig -l -f vms/${vm}.img | sed -e 's/^md//')
mdconfig -d -u $md
rmdir /mnt/bhyve-${vm}
<
<
<
<
<
<
<














Deleted share/experiments/bhyve/example-rc.conf.

1
2
3
hostname="example"
ifconfig_vtnet0="192.168.6.2/24"
defaultrouter="192.168.6.1"
<
<
<






Deleted share/experiments/bhyve/example.mk.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
.PHONY: vm bridge iface boot

vm: vms/example.img

bridge:
	./bin/bridge.sh bh2 192.168.5.1/24

iface: bridge
	./bin/tap.sh example bh2

vms/example.img: example-rc.conf
	if [ -f ${.TARGET} ]; then ./bin/mount.sh example; else ./bin/new-vm.sh example; fi
	./bin/config.sh example
	./bin/unmount.sh example

boot: vms/example.img iface
	./bin/boot.sh example

destroy:
	if [ -f /dev/vmm/example ]; then bhyvectl --destroy --vm=example; fi
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted share/experiments/c/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
CFLAGS= -Wall -Werror # = instead of += because -O2 results in term-loop not incrementing

PROGS=
MAN=

PROGS+= pointer-size
SRCS.pointer-size= pointer_size.c

PROGS+= term-loop
SRCS.term-loop= term_loop.c

.include <bsd.progs.mk>
<
<
<
<
<
<
<
<
<
<
<
<
























Deleted share/experiments/c/pointer_size.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#include <stdio.h>
#include <malloc.h>
#include <string.h>

int main() {
  char *s1 = strdup("hello");
  size_t size1 = malloc_usable_size(s1);
  int len1 = strlen(s1);
  printf("s1\n\tsize: %lu\n\tlen: %i\n", size1, len1);

  char *s2 = strdup("hello world are you there");
  size_t size2 = malloc_usable_size(s2);
  int len2 = strlen(s2);
  printf("s2\n\tsize: %lu\n\tlen: %i\n", size2, len2);

  return 0;
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































Deleted share/experiments/c/term_loop.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
#include <stdio.h>
#include <signal.h>

void report();

static int count = 0;

int main() {
  signal(SIGINFO, report);

  while(1) {
    ++count;
  }

  return 0;
}

void report() {
  printf("%i\n", count);
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted share/experiments/dynmk/.gitignore.

1
2
out
out2
<
<




Deleted share/experiments/dynmk/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
.MAIN: all

SRCSUB = C/^src\///
OUTSUB = C/^out\///

clean:
	rm -rf out out2

INFILES != find src -type f -not -name '*~'

.for f in ${INFILES}
out/${f:${SRCSUB}}: out ${f}
	cp ${f} ${.TARGET}
all: out/${f:${SRCSUB}}
.endfor

out:
	mkdir out
out2:
	mkdir out2

.if exists(out)
NEWFILES != find out -type f
.for f in ${NEWFILES}
out2/${f:${OUTSUB}}: out2 ${f}
	cp ${f} ${.TARGET}

all: out2/${f:${OUTSUB}}
.endfor
.endif
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




























































Deleted share/experiments/dynmk/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
# Dynamic Make

What I want: run `make` one time to produce files under `out/` and `out2/`.

What happens: I have to run `make` twice.
The first run produces `out/*`, and the second produces `out2/*`.

Why? As I understand it, make builds its targets statically in one pass.
The first time this runs, there is nothing in `out/`, so `NEWFILES != find out -type f` is empty.

Is there a way to add targets after another target runs, or as part of a run?
I don't want to evaluate the `out2/*` targets until after `out/*` has completed.

I have tried "recursive make" where I define a target like:

```make
all:
	make out
	make out2
```

The problem I've found with that is that stderr doesn't seem to be redirected properly.
Any stderr from `make out2` is printed on stdout on the top-level make.

I would like to do this with a single non-recursive make target if possible.
It may not be possible though.
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




















































Deleted share/experiments/dynmk/src/bar.

Deleted share/experiments/dynmk/src/foo.

Deleted share/experiments/elixir-benchmarks/BSDmakefile.

1
2
3
4
5
6
LIB= nif_uuid_gen
SRCS= nif_uuid_gen.c
SHLIB_NAME= ${LIB}.so
MK_MAN= no

.include <bsd.lib.mk>
<
<
<
<
<
<












Deleted share/experiments/elixir-benchmarks/nif_uuid_gen.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
#include <stdio.h>
#include <sys/uuid.h>
#include <uuid.h>
#include "/usr/local/lib/erlang25/usr/include/erl_nif.h"

static ERL_NIF_TERM uuid_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) {
  char *uuid_str;
  uuid_t uuid;
  uint32_t status;

  uuid_create(&uuid, &status);
  if (status != uuid_s_ok) {
    return enif_raise_exception(env, enif_make_string(env, "error creating uuid", ERL_NIF_LATIN1));
  }

  uuid_to_string(&uuid, &uuid_str, &status);
  if (status != uuid_s_ok) {
    return enif_raise_exception(env, enif_make_string(env, "error converting uuid to string", ERL_NIF_LATIN1));
  }

  return enif_make_string(env, uuid_str, ERL_NIF_LATIN1);
}

static ErlNifFunc nif_funcs[] = {
  {"uuid", 0, uuid_nif}
};

ERL_NIF_INIT(Elixir.UuidNif, nif_funcs, NULL, NULL, NULL, NULL)
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
























































Deleted share/experiments/elixir-benchmarks/uuid.exs.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
Mix.install([
  {:faker, "~> 0.17.0"},
  {:uuid, "~> 2.0", hex: :uuid_erl},
])

require Logger

benchmark = fn name, func ->
  Logger.debug("BEG #{name}")
  {time, _} = :timer.tc(fn ->
    Enum.each(1..2_000_000, fn _ -> func.() end)
  end)
  seconds = time / 1_000_000
  Logger.debug("END #{name} (#{seconds} seconds)")
end

# uuid_erl - disabled because string version is slow
#benchmark.(":uuid.get_v4_random()", fn -> :uuid.get_v4_urandom() end)
#benchmark.(":uuid.get_v4_random() to string", fn -> :uuid.get_v4_urandom() |> :uuid.uuid_to_string() end)

# faker
benchmark.("faker", fn -> Faker.UUID.v4() end)

defmodule UuidNif do
  @on_load :init

  def init do
    :ok = :erlang.load_nif("./nif_uuid_gen", 0)
  end

  def uuid, do: exit(:nif_not_loaded)
end

# NIF
benchmark.("nif", &UuidNif.uuid/0)
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































Deleted share/experiments/fake-rootkit/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
.PHONY: setup spec verify rootkit verify-offline clean

SPEC= spec.mtree

setup:
	mkdir -p host/bin
	cp bin-good/mtree host/bin/
	cp bin-good/md5sum host/bin/

spec: ${SPEC}
${SPEC}:
	PATH=./host/bin:$$PATH mtree -c -p host > ${.TARGET}

verify: ${SPEC}
	PATH=./host/bin:$$PATH mtree -p host < ${SPEC}
	PATH=./host/bin:$$PATH md5sum host/bin/mtree
	PATH=./host/bin:$$PATH md5sum host/bin/md5sum

verify-offline:
	mtree -p host < ${SPEC} || echo "mtree does not match!"
	md5sum host/bin/mtree
	md5sum host/bin/md5sum

rootkit:
	cp host/bin/mtree host/bin/orig-mtree
	cp host/bin/md5sum host/bin/orig-md5sum
	cp bin-bad/mtree host/bin/
	cp bin-bad/md5sum host/bin/

clean:
	rm -rf host/bin spec.mtree
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























































Deleted share/experiments/fake-rootkit/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
# a fake rootkit to thwart mtree and md5sum verification

*this is not a real rootkit* (that's what they all say)

[The FreeBSD Handbook suggests using `mtree(8)` to verify system files and binaries.](https://docs.freebsd.org/en/books/handbook/security/#security-ids)

**What if mtree itself is compromised?**

This fake rootkit simulates this scenario, by replacing two binaries used for verification - `mtree(8)` and `md5sum(1)` - with compromised binaries that report misleading results:

- `bin-bad/mtree` - no-op, so the verification appears to succeed even though the tree signature has changed
- `bin-bad/md5sum` - calls original `md5sum`, passing paths to original `md5sum` and `mtree` if necessary, to disguise the fact that these have been compromised

Obviously it would not be hard to expose this naive "rootkit."
It simply illustrates how to anticipate and mislead certain verification techniques.

*Can a compromised system be trusted to report its file signatures?
As of now, I don't think so.
**A separate trusted system needs to verify the data at rest.***

## Example

```
$ make setup
mkdir -p host/bin
cp bin-good/mtree host/bin/
cp bin-good/md5sum host/bin/

$ make spec
PATH=./host/bin:$PATH mtree -c -p host > spec.mtree

$ make verify
PATH=./host/bin:$PATH mtree -p host < spec.mtree
PATH=./host/bin:$PATH md5sum host/bin/mtree
efa74a99c24a881ca30f438d8ee79441  host/bin/mtree
PATH=./host/bin:$PATH md5sum host/bin/md5sum
b13d64791e86f1381590d9cf87be8c41  host/bin/md5sum

$ make rootkit
cp host/bin/mtree host/bin/orig-mtree
cp host/bin/md5sum host/bin/orig-md5sum
cp bin-bad/mtree host/bin/
cp bin-bad/md5sum host/bin/

$ make verify
PATH=./host/bin:$PATH mtree -p host < spec.mtree
I am root mtree
PATH=./host/bin:$PATH md5sum host/bin/mtree
I am root md5sum
efa74a99c24a881ca30f438d8ee79441  host/bin/mtree
PATH=./host/bin:$PATH md5sum host/bin/md5sum
I am root md5sum
b13d64791e86f1381590d9cf87be8c41  host/bin/md5sum

$ make verify-offline
mtree -p host < spec.mtree || echo "mtree does not match!"
bin:    modification time (Fri Nov 10 01:15:11 2023, Fri Nov 10 01:15:18 2023)
extra: bin/orig-md5sum
bin/md5sum: 
	size (26, 241)
	modification time (Fri Nov 10 01:15:11 2023, Fri Nov 10 01:15:18 2023)
extra: bin/orig-mtree
bin/mtree: 
	size (29, 74)
	modification time (Fri Nov 10 01:15:11 2023, Fri Nov 10 01:15:18 2023)
mtree does not match!
md5sum host/bin/mtree
9512f5d8d9d8adf507dc76cffbe974e9  host/bin/mtree
md5sum host/bin/md5sum
a4d4589f492eef94086e329738b27107  host/bin/md5sum
```
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<














































































































































Deleted share/experiments/fake-rootkit/bin-bad/md5sum.

1
2
3
4
5
6
7
8
9
10
11
12
#!/bin/sh
sum=/sbin/md5sum

echo "I am root md5sum"

if echo "$1" | grep -q mtree; then
    $sum host/bin/orig-mtree | sed 's/orig-//'
elif echo "$1" | grep -q md5sum; then
    $sum host/bin/orig-md5sum | sed 's/orig-//'
else
    $sum $*
fi
<
<
<
<
<
<
<
<
<
<
<
<
























Deleted share/experiments/fake-rootkit/bin-bad/mtree.

1
2
3
4
5
#!/bin/sh

# no-op - don't even bother verifying!

echo "I am root mtree"
<
<
<
<
<










Deleted share/experiments/fake-rootkit/bin-good/md5sum.

1
2
#!/bin/sh
/sbin/md5sum $*
<
<




Deleted share/experiments/fake-rootkit/bin-good/mtree.

1
2
#!/bin/sh
/usr/sbin/mtree $*
<
<




Deleted share/experiments/fake-rootkit/host/data/bar.txt.

1
this is bar
<


Deleted share/experiments/fake-rootkit/host/data/foo.txt.

1
this is foo
<


Deleted share/experiments/ffi-adventure/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
# FFI Adventure

Explorations in calling C libraries from other languages.

## Languages for consideration

- C (reference)
- D
- Elixir
- Go
- Lua - `/usr/libexec/flua`
- Nim
- Pony
- Rust
- V
- Zig

## Languages to investigate

I know less about these, and may have to port some of them to FreeBSD.

- C++
- Common Lisp
- Crystal
- Haxe
- Jai
- Nit
- OCaml
- Odin
- Vale

## Examples

- hello world (pass a string to C)
- upcase (modify string in calling language)
- intentional memory leak (how easy is it?)
- concurrency (data races?)
- various other memory safety issues

## Questions

- Do any languages let you restrict access to a C library? e.g. for [a library that's not thread-safe](c093c9c4c), ensure that only one thread can access it at a time? 

## Notes

- Go was a bit easier to get going than Rust - just inline CFLAGS and LDFLAGS in the code.
  Rust setup took more time, appears to be more configurable.
- Rust provided compiler errors, e.g. CString is not FFI-safe (it needs to be turned into a pointer)
- Rust appears to automatically deallocate the memory, whereas Go needs to call C.free
- Pony was easy to get going.
- Go can use the library's header file; Rust and Pony appear to have their own definitions
- Mutation seems to require copying memory (especially for strings, because they're null-terminated)
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































































































Deleted share/experiments/ffi-adventure/concurrency/Makefile.

1
2
3
4
5
6
7
8
all:
	for m in concurrency-*/Makefile; do ${MAKE} -C $$(dirname $$m); done

run:
	for m in concurrency-*/Makefile; do echo "=== $$(dirname $$m):"; ${MAKE} -C $$(dirname $$m) run; done

clean:
	for m in concurrency-*/Makefile; do ${MAKE} -C $$(dirname $$m) clean; done
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/concurrency/concurrency-c/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
_build/badbank: badbank.c _build/libbadbank.a
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror -L_build -lbadbank -lpthread ${.ALLSRC:[1]}

_build/libbadbank.a: libbadbank.c
	@mkdir -p ${.TARGET:H}
	cc -fPIC -c -o ${.TARGET} -Wall -Werror ${.ALLSRC}

run: _build/badbank
	./_build/badbank

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<
<
<
<
<


























Deleted share/experiments/ffi-adventure/concurrency/concurrency-c/badbank.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
#include <stdio.h>
#include <pthread.h>
#include "libbadbank.h"

static void *_deposit() {
  for(int i = 0; i < 1000000; i++) {
    deposit(100);
  }
  return NULL;
}

int main() {
  pthread_t t1;
  pthread_create(&t1, NULL, _deposit, NULL);

  pthread_t t2;
  pthread_create(&t2, NULL, _deposit, NULL);

  pthread_join(t1, NULL);
  pthread_join(t2, NULL);

  printf("Balance: $%i\n", balance());
  return 0;
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































Deleted share/experiments/ffi-adventure/concurrency/concurrency-c/libbadbank.c.

1
2
3
4
5
6
7
8
9
10
11
12
#include <stdio.h>
#include "libbadbank.h"

static int _balance = 0;

void deposit(int amount) {
  _balance += amount;
}

int balance() {
  return _balance;
}
<
<
<
<
<
<
<
<
<
<
<
<
























Deleted share/experiments/ffi-adventure/concurrency/concurrency-c/libbadbank.h.

1
2
3
4
5
6
7
#ifndef __LIBBADBANK
#define __LIBBADBANK

void deposit(int);
int balance();

#endif
<
<
<
<
<
<
<














Deleted share/experiments/ffi-adventure/concurrency/concurrency-go/Makefile.

1
2
3
4
5
6
7
8
_build/badbank: badbank.go
	go build -o ${.TARGET} ${.ALLSRC}

run: _build/badbank
	./_build/badbank

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/concurrency/concurrency-go/badbank.go.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
package main

/*
#cgo CFLAGS: -I../concurrency-c
#cgo LDFLAGS: -L../concurrency-c/_build -lbadbank
#include <stdlib.h>
#include "libbadbank.h"
*/
import "C"
import "fmt"

func main() {
     defer printBalance()
     done1 := deposit()
     done2 := deposit()
     <-done1
     <-done2
}

func printBalance() {
  fmt.Printf("Balance: $%v\n", C.balance())
}

func deposit() <-chan struct{} {
  done := make(chan struct{})
  go func() {
    for i := 0; i < 1000000; i++ {
      C.deposit(100)
    }
    done <- struct{}{}
  }()
  return done
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































































Deleted share/experiments/ffi-adventure/concurrency/concurrency-pony/Makefile.

1
2
3
4
5
6
7
8
_build/badbank: badbank.pony
	ponyc -o ${.TARGET:H} -b ${.TARGET:T} .

run: _build/badbank
	./_build/badbank

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/concurrency/concurrency-pony/badbank.pony.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
use "collections"
use "path:../concurrency-c/_build"
use "lib:badbank"
use @deposit[None](amount: U32)
use @balance[U32]()

actor Main
  var _env: Env
  var _count: U32 = 0

  new create(env: Env) =>
    _env = env
    start_depositor()
    start_depositor()

  be start_depositor() =>
    let d: Depositor = Depositor.create()
    d.deposit(this)
    _count = _count + 1

  be done() =>
    _count = _count - 1
    if _count == 0 then
      _env.out.print("Balance: $" + @balance().string())
    end

actor Depositor
  be deposit(main: Main) =>
    var count: U32 = 0
    while count < 1000000 do
      @deposit(100)
      count = count + 1
    end
    main.done()
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































































Deleted share/experiments/ffi-adventure/hello-world/Makefile.

1
2
3
4
5
6
7
8
all:
	for m in hello-world-*/Makefile; do ${MAKE} -C $$(dirname $$m); done

run:
	for m in hello-world-*/Makefile; do echo "=== $$(dirname $$m):"; ${MAKE} -C $$(dirname $$m) run; done

clean:
	for m in hello-world-*/Makefile; do ${MAKE} -C $$(dirname $$m) clean; done
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/hello-world/hello-world-c/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
_build/hello: hello.c _build/libhello.a
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror -L_build -lhello ${.ALLSRC:[1]}

_build/libhello.a: libhello.c
	@mkdir -p ${.TARGET:H}
	cc -fPIC -c -o ${.TARGET} -Wall -Werror ${.ALLSRC}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<
<
<
<
<


























Deleted share/experiments/ffi-adventure/hello-world/hello-world-c/hello.c.

1
2
3
4
5
6
#include "libhello.h"

int main() {
  hello("C");
  return 0;
}
<
<
<
<
<
<












Deleted share/experiments/ffi-adventure/hello-world/hello-world-c/libhello.c.

1
2
3
4
5
6
7
8
9
10
#include <stdio.h>
#include "libhello.h"

void hello(const char *name) {
  printf("Hello, %s!\n", name);
}

void helloWorld() {
  hello("World");
}
<
<
<
<
<
<
<
<
<
<




















Deleted share/experiments/ffi-adventure/hello-world/hello-world-c/libhello.h.

1
2
3
4
5
6
7
#ifndef __LIBHELLO
#define __LIBHELLO

void hello(const char *name);
void helloWorld();

#endif
<
<
<
<
<
<
<














Deleted share/experiments/ffi-adventure/hello-world/hello-world-cpp/Makefile.

1
2
3
4
5
6
7
8
9
_build/hello: hello.cpp ../hello-world-c/_build/libhello.a
	@mkdir -p ${.TARGET:H}
	c++ -o ${.TARGET} -Wall -Werror -I../hello-world-c -L../hello-world-c/_build -lhello ${.ALLSRC:[1]}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/ffi-adventure/hello-world/hello-world-cpp/hello.cpp.

1
2
3
4
5
6
7
8
9
extern "C"
{
#include "libhello.h"
}

int main() {
  hello("C++");
  return 0;
}
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/ffi-adventure/hello-world/hello-world-crystal/Makefile.

1
2
3
4
5
6
7
8
9
_build/hello: hello.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} hello.cr

run: _build/hello
	./_build/hello

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/ffi-adventure/hello-world/hello-world-crystal/hello.cr.

1
2
3
4
5
6
7
8
@[Link(lib: "hello", ldflags: "-L#{__DIR__}/../hello-world-c/_build")]
lib LibHello
  fun hello_world = helloWorld()
  fun hello(name : UInt8*)
end

LibHello.hello_world()
LibHello.hello("Crystal")
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/hello-world/hello-world-d/Makefile.

1
2
3
4
5
6
7
8
_build/hello: hello.d
	ldmd2 -L=-L../hello-world-c/_build -L=-lhello -of=${.TARGET} ${.ALLSRC}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/hello-world/hello-world-d/hello.d.

1
2
3
4
5
6
7
extern (C) void hello(immutable char *);

import std.string;

void main() {
  hello(std.string.toStringz("D"));
}
<
<
<
<
<
<
<














Deleted share/experiments/ffi-adventure/hello-world/hello-world-elixir/.formatter.exs.

1
2
3
4
# Used by "mix format"
[
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]
<
<
<
<








Deleted share/experiments/ffi-adventure/hello-world/hello-world-elixir/Makefile.

1
2
3
4
5
6
7
8
9
10
_build/hello_nif.so: ext/hello_nif.c
	@mkdir -p ${.TARGET:H}
	clang -o _build/hello_nif.o -I/usr/local/lib/erlang/usr/include -I../hello-world-c -c -fPIC ${.ALLSRC}
	clang -shared -o ${.TARGET} -L../hello-world-c/_build -lhello _build/hello_nif.o

run: _build/hello_nif.so
	./hello

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<
<




















Deleted share/experiments/ffi-adventure/hello-world/hello-world-elixir/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
# HelloWorld

**TODO: Add description**

## Installation

If [available in Hex](https://hex.pm/docs/publish), the package can be installed
by adding `hello_world` to your list of dependencies in `mix.exs`:

```elixir
def deps do
  [
    {:hello_world, "~> 0.1.0"}
  ]
end
```

Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at <https://hexdocs.pm/hello_world>.

<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<










































Deleted share/experiments/ffi-adventure/hello-world/hello-world-elixir/ext/hello_nif.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
#include <erl_nif.h>
#include "libhello.h"

#define MAXBUFLEN 1024

static ERL_NIF_TERM hello_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) {
  unsigned *len;
  char name[MAXBUFLEN];
  enif_get_string(env, argv[0], name, MAXBUFLEN, ERL_NIF_LATIN1);
  hello(name);
  return enif_make_atom(env, "ok");
}

static ErlNifFunc nif_funcs[] = {
  {"hello", 1, hello_nif}
};

ERL_NIF_INIT(Elixir.HelloWorld, nif_funcs, NULL, NULL, NULL, NULL)
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































Deleted share/experiments/ffi-adventure/hello-world/hello-world-elixir/hello.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
#!/usr/bin/env elixir

defmodule HelloWorld do
  @on_load :load_nifs

  def load_nifs do
    :erlang.load_nif('./_build/hello_nif', 0)
  end

  def hello, do: :ok = hello('Elixir') # elrang NIF requires a charlist

  def hello(_name), do: raise "NIF hello/1 not implemented"
end

HelloWorld.hello()
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























Deleted share/experiments/ffi-adventure/hello-world/hello-world-go/Makefile.

1
2
3
4
5
6
7
8
_build/hello: hello.go ../hello-world-c/_build/libhello.a
	go build -o ${.TARGET} ${.ALLSRC:M*.go}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/hello-world/hello-world-go/hello.go.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
package main

/*
#cgo CFLAGS: -I../hello-world-c
#cgo LDFLAGS: -L../hello-world-c/_build -lhello
#include <stdlib.h>
#include "libhello.h"
*/
import "C"
import "unsafe"

func main() {
	hello()
}

func hello() {
	name := C.CString("Go")
	defer C.free(unsafe.Pointer(name))
	C.hello(name)
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted share/experiments/ffi-adventure/hello-world/hello-world-nim/Makefile.

1
2
3
4
5
6
7
8
9
10
11
PATH:= ${PATH}:/usr/local/nim/bin
.export PATH

_build/hello: hello.nim
	nim c -o:hello --outdir:_build --cincludes:../hello-world-c --clibdir:../hello-world-c/_build -l:-lhello hello.nim

run: _build/hello
	./_build/hello

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<
<
<






















Deleted share/experiments/ffi-adventure/hello-world/hello-world-nim/hello.nim.

1
2
3
4
# header may be optional since it can find the symbol in lib
proc hello(name: cstring) {.header: "libhello.h", importc: "hello"}

hello("Nim")
<
<
<
<








Deleted share/experiments/ffi-adventure/hello-world/hello-world-pony/Makefile.

1
2
3
4
5
6
7
8
_build/hello: hello.pony
	ponyc -o ${.TARGET:H} -b ${.TARGET:T} .

run: _build/hello
	./_build/hello

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/hello-world/hello-world-pony/hello.pony.

1
2
3
4
5
6
7
use "path:../hello-world-c/_build"
use "lib:hello"
use @hello[None](name: Pointer[U8] tag)

actor Main
  new create(env: Env) =>
    @hello("Pony".cstring())
<
<
<
<
<
<
<














Deleted share/experiments/ffi-adventure/hello-world/hello-world-rust/.cargo/config.toml.

1
2
[build]
target-dir = "_build"
<
<




Deleted share/experiments/ffi-adventure/hello-world/hello-world-rust/.gitignore.

1
/target
<


Deleted share/experiments/ffi-adventure/hello-world/hello-world-rust/Cargo.lock.

1
2
3
4
5
6
7
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "hello-world-rust"
version = "0.1.0"
<
<
<
<
<
<
<














Deleted share/experiments/ffi-adventure/hello-world/hello-world-rust/Cargo.toml.

1
2
3
4
5
6
7
8
[package]
name = "hello-world-rust"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/hello-world/hello-world-rust/Makefile.

1
2
3
4
5
6
7
8
9
10
.PHONY: all run clean

all:
	cargo build

run:
	cargo run

clean:
	cargo clean
<
<
<
<
<
<
<
<
<
<




















Deleted share/experiments/ffi-adventure/hello-world/hello-world-rust/build.rs.

1
2
3
4
fn main() {
    println!("cargo:rustc-link-search=../hello-world-c/_build");
    println!("cargo:rustc-link-lib=hello");
}
<
<
<
<








Deleted share/experiments/ffi-adventure/hello-world/hello-world-rust/src/main.rs.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
use std::ffi::CString;
use std::os::raw::c_char;

fn main() {
    myhello()
}

fn myhello() {
    unsafe {
        let name = CString::new("Rust").expect("CString::new failed");
        hello(name.as_ptr());
    }
}

#[link(name = "hello", kind = "static")]
extern "C" {
    fn hello(name: *const c_char);
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































Deleted share/experiments/ffi-adventure/hello-world/hello-world-v/Makefile.

1
2
3
4
5
6
7
8
9
_build/hello: hello.v
	@mkdir -p ${.TARGET:H}
	v -o ${.TARGET} ${.ALLSRC}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/ffi-adventure/hello-world/hello-world-v/hello.v.

1
2
3
4
5
6
7
8
9
10
11
#flag -lhello
#flag -L../hello-world-c/_build
#flag -I../hello-world-c
#include "libhello.h"

fn C.hello(&char) int

fn main() {
  C.hello("V".str)
}

<
<
<
<
<
<
<
<
<
<
<






















Deleted share/experiments/ffi-adventure/hello-world/hello-world-zig/Makefile.

1
2
3
4
5
6
7
8
zig-out/hello: *.zig
	zig build

run:
	zig build run

clean:
	rm -rf zig-cache zig-out
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/hello-world/hello-world-zig/build.zig.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
const std = @import("std");

pub fn build(b: *std.build.Builder) void {
    // Standard target options allows the person running `zig build` to choose
    // what target to build for. Here we do not override the defaults, which
    // means any target is allowed, and the default is native. Other options
    // for restricting supported target set are available.
    const target = b.standardTargetOptions(.{});

    // Standard release options allow the person running `zig build` to select
    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
    const mode = b.standardReleaseOptions();

    const exe = b.addExecutable("hello", "src/main.zig");
    exe.setTarget(target);
    exe.setBuildMode(mode);
    exe.addIncludePath("../hello-world-c");
    exe.addLibraryPath("../hello-world-c/_build");
    exe.linkSystemLibrary("hello");
    exe.install();

    const run_cmd = exe.run();
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |args| {
        run_cmd.addArgs(args);
    }

    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    const exe_tests = b.addTest("src/main.zig");
    exe_tests.setTarget(target);
    exe_tests.setBuildMode(mode);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&exe_tests.step);
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<










































































Deleted share/experiments/ffi-adventure/hello-world/hello-world-zig/src/main.zig.

1
2
3
4
5
6
7
8
9
10
const std = @import("std");

const c = @cImport({
  @cInclude("libhello.h");
});

pub fn main() !void {
  c.hello("Zig");
}

<
<
<
<
<
<
<
<
<
<




















Deleted share/experiments/ffi-adventure/libucl/Justfile.

1
2
3
4
5
6
7
8
9
10
11
12
test:
  #!/bin/sh
  mkdir -p _build
  for j in */Justfile; do
    impl=$(dirname $j)
    just -f $j run > _build/$impl
    cmp expected _build/$impl
  done

@clean:
  rm -rf _build
  for j in */Justfile; do just -f $j clean; done
<
<
<
<
<
<
<
<
<
<
<
<
























Deleted share/experiments/ffi-adventure/libucl/data.ucl.

1
2
3
4
5
greeting = "hello world"
vars {
  foo = "this is foo"
  bar = "this is bar"
}
<
<
<
<
<










Deleted share/experiments/ffi-adventure/libucl/expected.

1
2
3
hello world
this is foo
this is bar
<
<
<






Deleted share/experiments/ffi-adventure/libucl/libucl-c/Justfile.

1
2
3
4
5
6
@run:
  make > /dev/null
  ./_build/libucl-c

@clean:
  rm -rf _build
<
<
<
<
<
<












Deleted share/experiments/ffi-adventure/libucl/libucl-c/Makefile.

1
2
3
_build/libucl-c: libucl_c.c
	@mkdir -p ${.TARGET:H}
	cc -I/usr/local/include -L/usr/local/lib -lucl -Wall -Werror -o ${.TARGET} ${.ALLSRC}
<
<
<






Deleted share/experiments/ffi-adventure/libucl/libucl-c/libucl_c.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
#include <stdio.h>
#include <ucl.h>

int main() {
  ucl_object_t* ucl = NULL;
  const char* greeting = NULL;
  const char* foo = NULL;
  const char* bar = NULL;

  struct ucl_parser* parser = ucl_parser_new(UCL_PARSER_DEFAULT);
  ucl_parser_add_file(parser, "../data.ucl");
  if((ucl = ucl_parser_get_object(parser))) {
    ucl_object_tostring_safe(ucl_object_lookup(ucl, "greeting"), &greeting);
    ucl_object_tostring_safe(ucl_object_lookup_path(ucl, "vars.foo"), &foo);
    ucl_object_tostring_safe(ucl_object_lookup_path(ucl, "vars.bar"), &bar);
  }
  puts(greeting);
  puts(foo);
  puts(bar);
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted share/experiments/ffi-adventure/libucl/libucl-cpp/Justfile.

1
2
3
4
5
6
@run:
  make > /dev/null
  ./_build/libucl-cpp

@clean:
  rm -rf _build
<
<
<
<
<
<












Deleted share/experiments/ffi-adventure/libucl/libucl-cpp/Makefile.

1
2
3
_build/libucl-cpp: libucl_cpp.cpp
	@mkdir -p ${.TARGET:H}
	c++ -I/usr/local/include -L/usr/local/lib -lucl -Wall -Werror -o ${.TARGET} ${.ALLSRC}
<
<
<






Deleted share/experiments/ffi-adventure/libucl/libucl-cpp/libucl_cpp.cpp.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
#include <iostream>
extern "C"
{
#include <ucl.h>
}

int main() {
  ucl_object_t* ucl = NULL;
  const char* greeting = NULL;
  const char* foo = NULL;
  const char* bar = NULL;

  struct ucl_parser* parser = ucl_parser_new(UCL_PARSER_DEFAULT);
  ucl_parser_add_file(parser, "../data.ucl");
  if((ucl = ucl_parser_get_object(parser))) {
    ucl_object_tostring_safe(ucl_object_lookup(ucl, "greeting"), &greeting);
    ucl_object_tostring_safe(ucl_object_lookup_path(ucl, "vars.foo"), &foo);
    ucl_object_tostring_safe(ucl_object_lookup_path(ucl, "vars.bar"), &bar);
  }

  std::cout << greeting << "\n";
  std::cout << foo << "\n";
  std::cout << bar << "\n";
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































Deleted share/experiments/ffi-adventure/libucl/libucl-go/Justfile.

1
2
3
4
5
6
7
@run:
  mkdir -p _build
  go build -o _build/libucl-go *.go
  ./_build/libucl-go

@clean:
  rm -rf _build
<
<
<
<
<
<
<














Deleted share/experiments/ffi-adventure/libucl/libucl-go/libucl_go.go.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
package main

/*
#cgo CFLAGS: -I/usr/local/include
#cgo LDFLAGS: -L/usr/local/lib -lucl
#include <ucl.h>
*/
import "C"
import "fmt"
//import "unsafe"

func main() {
  var greeting *C.char
  var foo *C.char
  var bar *C.char

  var parser = C.ucl_parser_new(C.UCL_PARSER_DEFAULT)
  C.ucl_parser_add_file(parser, C.CString("../data.ucl"))
  var ucl = C.ucl_parser_get_object(parser)
  if(ucl != nil) {
    C.ucl_object_tostring_safe(C.ucl_object_lookup(ucl, C.CString("greeting")), &greeting)
    C.ucl_object_tostring_safe(C.ucl_object_lookup_path(ucl, C.CString("vars.foo")), &foo)
    C.ucl_object_tostring_safe(C.ucl_object_lookup_path(ucl, C.CString("vars.bar")), &bar)
  }

  fmt.Println(C.GoString(greeting))
  fmt.Println(C.GoString(foo))
  fmt.Println(C.GoString(bar))
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


























































Deleted share/experiments/ffi-adventure/libucl/libucl-zig/Justfile.

1
2
3
4
5
@run:
  zig build run

@clean:
  rm -rf _build
<
<
<
<
<










Deleted share/experiments/ffi-adventure/libucl/libucl-zig/build.zig.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
const std = @import("std");

pub fn build(b: *std.build.Builder) void {
    // Standard target options allows the person running `zig build` to choose
    // what target to build for. Here we do not override the defaults, which
    // means any target is allowed, and the default is native. Other options
    // for restricting supported target set are available.
    const target = b.standardTargetOptions(.{});

    // Standard release options allow the person running `zig build` to select
    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
    const mode = b.standardReleaseOptions();

    const exe = b.addExecutable("libucl-zig", "src/main.zig");
    exe.setTarget(target);
    exe.setBuildMode(mode);
    exe.addIncludePath("/usr/local/include");
    exe.addLibraryPath("/usr/local/lib");
    exe.linkSystemLibrary("ucl");
    exe.install();

    const run_cmd = exe.run();
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |args| {
        run_cmd.addArgs(args);
    }

    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    const exe_tests = b.addTest("src/main.zig");
    exe_tests.setTarget(target);
    exe_tests.setBuildMode(mode);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&exe_tests.step);
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<










































































Deleted share/experiments/ffi-adventure/libucl/libucl-zig/src/main.zig.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
const std = @import("std");

const ucl = @cImport({
  @cInclude("ucl.h");
});

pub fn main() !void {
    var parser: ?*ucl.ucl_parser = ucl.ucl_parser_new(ucl.UCL_PARSER_DEFAULT);
    var ucl_obj: ?*ucl.ucl_object_t = null;
    var greeting: [*c] const u8 = null;
    var foo: [*c] const u8 = null;
    var bar: [*c] const u8 = null;

    _ = ucl.ucl_parser_add_file(parser, "../data.ucl");
    ucl_obj = ucl.ucl_parser_get_object(parser);
    if(ucl_obj != null) {
      _ = ucl.ucl_object_tostring_safe(ucl.ucl_object_lookup(ucl_obj, "greeting"), &greeting);
      _ = ucl.ucl_object_tostring_safe(ucl.ucl_object_lookup_path(ucl_obj, "vars.foo"), &foo);
      _ = ucl.ucl_object_tostring_safe(ucl.ucl_object_lookup_path(ucl_obj, "vars.bar"), &bar);
    }

    const stdout_file = std.io.getStdOut().writer();
    var bw = std.io.bufferedWriter(stdout_file);
    const stdout = bw.writer();
    try stdout.print("{s}\n{s}\n{s}\n", .{greeting, foo, bar});
    try bw.flush();
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






















































Deleted share/experiments/ffi-adventure/libucl/uclcmd/Justfile.

1
2
3
4
5
6
7
run:
  #!/bin/sh
  for v in greeting vars.foo vars.bar; do
    uclcmd get -q -f ../data.ucl $v
  done

clean:
<
<
<
<
<
<
<














Deleted share/experiments/ffi-adventure/memleak/memleak-go/Makefile.

1
2
3
4
5
6
7
8
_build/memleak: memleak.go
	go build -o ${.TARGET} ${.ALLSRC:M*.go}

run: _build/memleak
	./_build/memleak

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/memleak/memleak-go/memleak.go.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
package main

/*
This attempts to intentionally leak memory, by allocating a
C string but not freeing it when done.

It appears to not be necessary to free the memory, the
garbage collector will pick it up anyway.
*/

/*
#include <stdlib.h>
*/
import "C"

import (
"fmt"
"math"
"runtime"
"runtime/debug"
"time"
//"unsafe"
)

func main() {
	debug.SetGCPercent(-1)
	debug.SetMemoryLimit(math.MaxInt64)

	numstrings := 1000000;
	for i := 0; i < numstrings; i++ {
		makestring(i)
	}
	fmt.Printf("%v strings made\n", numstrings)

	fmt.Println("Before garbage collection:")
	reportMem()

	runtime.GC()

	fmt.Println("after garbage collection:")
	reportMem()
}

func makestring(i int) {
	C.CString(fmt.Sprintf("string %v", i));
	//  defer C.free(unsafe.Pointer(s));
}

func reportMem() {
	loops := 3
	for i := 0; i < loops; i++ {
		fmt.Printf("==== %v/%v\n", i+1, loops)
		printmem()
		time.Sleep(3 * time.Second)
	}
}
func printmem() {
	var m runtime.MemStats
	runtime.ReadMemStats(&m)
	fmt.Printf("HeapAlloc = %v\n", m.HeapAlloc)
	fmt.Printf("HeapInuse = %v\n", m.HeapInuse)
	fmt.Printf("HeapObjects = %v\n", m.HeapObjects)
	fmt.Printf("NumGC = %v\n", m.NumGC)
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
































































































































Deleted share/experiments/ffi-adventure/upcase/Makefile.

1
2
3
4
5
6
7
8
all:
	for m in upcase-*/Makefile; do ${MAKE} -C $$(dirname $$m); done

run:
	for m in upcase-*/Makefile; do echo "=== $$(dirname $$m):"; ${MAKE} -C $$(dirname $$m) run; done

clean:
	for m in upcase-*/Makefile; do ${MAKE} -C $$(dirname $$m) clean; done
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/upcase/upcase-c/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
_build/upcase: upcase.c _build/libupcase.a libupcase.h
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror -L_build -lupcase upcase.c

_build/libupcase.a: libupcase.c libupcase.h
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror -c -fPIC libupcase.c

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<
<
<
<
<


























Deleted share/experiments/ffi-adventure/upcase/upcase-c/libupcase.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
#include "libupcase.h"

int upcase(char *s) {
  while(*s) {
    *s = toupper(*s);
    s++;
  }
  return 1;
}

int upcase_n(char *s, int len) {
  int i = 0;
  while(i++ < len) {
    *s = toupper(*s);
    s++;
  }
  return 1;
}    
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































Deleted share/experiments/ffi-adventure/upcase/upcase-c/libupcase.h.

1
2
3
4
5
6
7
8
9
#include <ctype.h>

#ifndef libupcase_h
#define libupcase_h

int upcase(char *);
int upcase_n(char *, int);

#endif
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/ffi-adventure/upcase/upcase-c/upcase.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "libupcase.h"

int main() {
  char *s = strdup("hello c");
  if(upcase(s)) {
    printf("%s\n", s);
  }
  free(s);
  return 0;
}
<
<
<
<
<
<
<
<
<
<
<
<
<


























Deleted share/experiments/ffi-adventure/upcase/upcase-crystal/Makefile.

1
2
3
4
5
6
7
8
9
_build/upcase: upcase.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} upcase.cr

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/ffi-adventure/upcase/upcase-crystal/upcase.cr.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
@[Link(lib: "upcase", ldflags: "-L#{__DIR__}/../upcase-c/_build")]
lib LibUpcase
  fun upcase(name : UInt8*) : UInt32
  fun upcase_n(name : UInt8*, len : UInt32) : UInt32
end

class CString
  @bytes : Slice(UInt8)

  def initialize(str : String)
    @bytes = Slice(UInt8).new(str.size + 1)
    str.to_slice.copy_to(@bytes)
    @bytes[-1] = 0
  end

  def to_unsafe
    @bytes.to_unsafe
  end

  def to_s(io)
    io << String.new(@bytes)
  end
end

str = CString.new("hello crystal")
LibUpcase.upcase(str)
puts str
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






















































Deleted share/experiments/ffi-adventure/upcase/upcase-d/Makefile.

1
2
3
4
5
6
7
8
_build/upcase: upcase.d
	ldmd2 -L=-L../upcase-c/_build -L=-lupcase -of=${.TARGET} ${.ALLSRC}

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/upcase/upcase-d/upcase.d.

1
2
3
4
5
6
7
8
9
10
11
extern (C) void upcase(char *);

import std.stdio;
import std.string;

void main() {
  auto s = "hello d";
  auto cs = cast (char *) std.string.toStringz(s);
  upcase(cs);
  writeln(std.string.fromStringz(cs));
}
<
<
<
<
<
<
<
<
<
<
<






















Deleted share/experiments/ffi-adventure/upcase/upcase-go/Makefile.

1
2
3
4
5
6
7
8
_build/upcase: upcase.go
	go build -o ${.TARGET} ${.ALLSRC:M*.go}

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/upcase/upcase-go/upcase.go.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
package main

/*
#cgo CFLAGS: -I../upcase-c
#cgo LDFLAGS: -L../upcase-c/_build -lupcase
#include <stdlib.h>
#include "libupcase.h"
*/
import "C"
import "unsafe"
import "fmt"

func main() {
	hello := C.CString("hello go")
	defer C.free(unsafe.Pointer(hello))

	C.upcase(hello)
	fmt.Println(C.GoString(hello))
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































Deleted share/experiments/ffi-adventure/upcase/upcase-nim/Makefile.

1
2
3
4
5
6
7
8
_build/upcase: upcase.nim
	nim c -o:upcase --outdir:_build --cincludes:../upcase-c --clibdir:../upcase-c/_build -l:-lupcase upcase.nim

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/upcase/upcase-nim/upcase.nim.

1
2
3
4
5
6
# header may be optional since it can find the symbol in lib
proc upcase(name: cstring) {.header: "libupcase.h", importc: "upcase"}

var s = "hello nim"
upcase(cstring(s))
echo(s)
<
<
<
<
<
<












Deleted share/experiments/ffi-adventure/upcase/upcase-pony/Makefile.

1
2
3
4
5
6
7
8
_build/upcase: upcase.pony
	ponyc -o ${.TARGET:H} -b ${.TARGET:T} .

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/upcase/upcase-pony/upcase.pony.

1
2
3
4
5
6
7
8
9
use "path:../upcase-c/_build"
use "lib:upcase"
use @upcase[I32](name: Pointer[U8] tag)

actor Main
  new create(env: Env) =>
    var s: String = recover "hello pony".clone() end
    @upcase(s.cstring())
    env.out.print(s)
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/ffi-adventure/upcase/upcase-rust/.cargo/config.toml.

1
2
[build]
target-dir = "_build"
<
<




Deleted share/experiments/ffi-adventure/upcase/upcase-rust/.gitignore.

1
/target
<


Deleted share/experiments/ffi-adventure/upcase/upcase-rust/Cargo.lock.

1
2
3
4
5
6
7
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "upcase-rust"
version = "0.1.0"
<
<
<
<
<
<
<














Deleted share/experiments/ffi-adventure/upcase/upcase-rust/Cargo.toml.

1
2
3
4
5
6
7
8
[package]
name = "upcase-rust"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
<
<
<
<
<
<
<
<
















Deleted share/experiments/ffi-adventure/upcase/upcase-rust/Makefile.

1
2
3
4
5
6
7
8
9
10
.PHONY: all run clean

all:
	cargo build

run:
	cargo run

clean:
	cargo clean
<
<
<
<
<
<
<
<
<
<




















Deleted share/experiments/ffi-adventure/upcase/upcase-rust/build.rs.

1
2
3
4
fn main() {
    println!("cargo:rustc-link-search=../upcase-c/_build");
    println!("cargo:rustc-link-lib=upcase");
}
<
<
<
<








Deleted share/experiments/ffi-adventure/upcase/upcase-rust/src/main.rs.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
fn main() {
    let mut s = String::from("hello rust");
    do_upcase(&mut s);
    println!("{}", s);
}

fn do_upcase(s: &mut String) {
    unsafe {
        let vec = s.as_mut_vec();
        vec.reserve(1);
        vec.push(0);
        upcase(vec.as_mut_ptr());
        vec.pop();
    };
}

#[link(name = "upcase", kind = "static")]
extern "C" {
    fn upcase(s: *mut u8);
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted share/experiments/ffi-adventure/upcase/upcase-v/Makefile.

1
2
3
4
5
6
7
8
9
_build/upcase: upcase.v
	@mkdir -p ${.TARGET:H}
	v -o ${.TARGET} ${.ALLSRC}

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/ffi-adventure/upcase/upcase-v/upcase.v.

1
2
3
4
5
6
7
8
9
10
11
12
#flag -lupcase
#flag -L../upcase-c/_build
#flag -I../upcase-c
#include "libupcase.h"

fn C.upcase(&char) int

fn main() {
  s := "hello v".clone()
  C.upcase(s.str)
  println(s)
}
<
<
<
<
<
<
<
<
<
<
<
<
























Deleted share/experiments/frank/Kyuafile.

1
2
3
4
5
syntax(2)

test_suite('frank')

include('tests/Kyuafile')
<
<
<
<
<










Deleted share/experiments/frank/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# frank - mash branches together

## Usage

```
$ frank status

branch1 up to date
branch2 out of date
```

```
$ frank status

branch1 up to date

$ frank fetch
$ frank status

branch1 out of date

$ frank up

branch 1 up to date
```

## Config

```
remotes {
  freebsd {
    url: "https://github.com/freebsd/freebsd-ports.git"
  }

  patmaddox {
    url: "git@github.com:patmaddox/freebsd-ports.git"
  }
}

branches {
  main {
    from = "freebsd"
	to = "patmaddox"
	strategy = "ff"
  }
  
  patmaddox-ports {
    base = "main"
	remote = "patmaddox"
	strategy = "reset"
	branches = [
	  "poudriere-devel",
	  "www-vultr-cli",
	]
  }
  
  poudriere-devel {
    base = "main"
	remote = "patmaddox"
	strategy = "rebase"
  }
  
  www-vultr-cli {
    base = "main"
	remote = "patmaddox"
	strategy = "rebase"
  }
}
```

## Testing

`kyua test`

If a test fails:

`kyua debug test_suite.sh:test_case`
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


























































































































































Deleted share/experiments/frank/bin/frank.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
#!/bin/sh
set -e
cmd=${1:-usage}

cmd_status() {
    if [ -s .frankconfig ]; then
	dir_error=0
	for b in $(branches); do
	    b_stat=$(branch_status $b)
	    echo "$b $b_stat"
	    if [ $b_stat != "ok" ]; then
		dir_error=1
	    fi
	done

	if [ $dir_error -eq 1 ]; then exit 1; fi
    fi
}

cmd_up() {
    for b in $(branches); do
	if [ ! -d $b ]; then
	    if [ $b = "main" ]; then
		git clone $(remote $(branch_remote $b)) $b
	    else
		cd main
		git worktree add ../$b
	    fi
	fi
    done
}

branches() {
    uclget .branches
}

branch_status() {
    b=${1:?missing branch param}
    if [ -d $b ]; then
	echo ok
    else
	echo missing
    fi
}

branch_remote() {
    uclget .branches.${1:?missing branch name}.remote
}

remote() {
    uclget .remotes.${1:?missing remote name}
}

uclget() {
    uclcmd get -l -e -f .frankconfig ${1:?missing ucl key} | head -n 1 | sed -e 's/^"//' -e 's/"$//'
}

cmd_$cmd
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




















































































































Deleted share/experiments/frank/tests/Kyuafile.

1
2
3
4
5
6
syntax(2)

test_suite('frank')

atf_test_program{name='test_status.sh'}
atf_test_program{name='test_up.sh'}
<
<
<
<
<
<












Deleted share/experiments/frank/tests/test_status.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
#! /usr/bin/env atf-sh
set -e

frank=$(atf_get_srcdir)/../bin/frank

## test cases
atf_init_test_cases() {
    atf_add_test_case no_config
    atf_add_test_case empty_config
    atf_add_test_case branch_dir_present
    atf_add_test_case branch_dir_not_present
}

## no config
atf_test_case no_config

no_config_body() {
    echo $frank
    atf_check -s exit:0 -o empty $frank status
}

## empty config
atf_test_case empty_config

empty_config_body() {
    touch .frankconfig
    atf_check -s exit:0 -o empty $frank status
}

## branch dir present
atf_test_case branch_dir_present

branch_dir_present_body() {
    mkdir present1 present2
    cat <<EOF > .frankconfig
branches {
  present1 {}
  present2 {}
}
EOF

    atf_check -s exit:0 -o match:"present1 ok" $frank status
    atf_check -s exit:0 -o match:"present2 ok" $frank status
}

## branch dir not present
atf_test_case branch_dir_not_present

branch_dir_not_present_body() {
    mkdir present

    cat <<EOF > .frankconfig
branches {
  present {}
  not_present {}
}
EOF

    atf_check -s exit:1 -o match:"present ok" $frank status
    atf_check -s exit:1 -o match:"not_present missing" $frank status
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


























































































































Deleted share/experiments/frank/tests/test_up.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
#! /usr/bin/env atf-sh
set -e

frank=$(atf_get_srcdir)/../bin/frank

## test cases
atf_init_test_cases() {
    atf_add_test_case main
    atf_add_test_case worktree
    atf_add_test_case double_up
}

## main
atf_test_case main

main_body() {
    cat <<EOF > .frankconfig
remotes {
  local = "source-repo"
}

branches {
  main {
    remote = "local"
  }
}
EOF
    
    git_hello

    atf_check -s exit:1 -o match:"main missing" $frank status
    atf_check -s exit:0 -e ignore $frank up
    atf_check -s exit:0 -o match:"main ok" $frank status
}

## worktree
atf_test_case worktree

worktree_body() {
    cat <<EOF > .frankconfig
remotes {
  local = "source-repo"
}

branches {
  main {
    remote = "local"
  }

  branch1 {}
}
EOF
    
    git_hello

    atf_check -s exit:1 -o match:"branch1 missing" $frank status
    atf_check -s exit:0 -o ignore -e ignore $frank up
    atf_check -s exit:0 -o match:"branch1 ok" $frank status
}

## double_up
atf_test_case double_up

double_up_body() {
    cat <<EOF > .frankconfig
remotes {
  local = "source-repo"
}

branches {
  main {
    remote = "local"
  }
}
EOF
    
    git_hello

    atf_check -s exit:1 -o match:"main missing" $frank status
    atf_check -s exit:0 -o ignore -e ignore $frank up
    atf_check -s exit:0 $frank up
    atf_check -s exit:0 -o match:"main ok" $frank status
}

## helpers
git_hello() {
    export GIT_AUTHOR_NAME=kyua
    export GIT_AUTHOR_EMAIL=kyua
    export GIT_COMMITTER_NAME=kyua
    export GIT_COMMITTER_EMAIL=kyua

    mkdir source-repo && cd source-repo
    which git
    git init -b main
    echo "hello world" > hello
    git add hello
    git commit -m "hello"
    cd ..
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































































































































































Deleted share/experiments/gauntlet/.envrc.

1
export BSDTESTS=/home/patmaddox/freebsd-releng-13.2/usr/tests
<


Deleted share/experiments/gauntlet/Kyuafile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
-- $FreeBSD$
--
-- Copyright 2011 Google Inc.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright
--   notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
--   notice, this list of conditions and the following disclaimer in the
--   documentation and/or other materials provided with the distribution.
-- * Neither the name of Google Inc. nor the names of its contributors
--   may be used to endorse or promote products derived from this software
--   without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-- Automatically recurses into any subdirectory that holds a Kyuafile.
-- As such, this Kyuafile is suitable for installation into the root of
-- the tests hierarchy as well as into any other subdirectory that needs
-- "auto-discovery" of tests.
--
-- This file is based on the Kyuafile.top sample file distributed in the
-- kyua-cli package.

syntax(2)

local directory = fs.dirname(current_kyuafile())
for file in fs.files(directory) do
    if file == "." or file == ".." then
        -- Skip these special entries.
    else
        local kyuafile_relative = fs.join(file, "Kyuafile")
        local kyuafile_absolute = fs.join(directory, kyuafile_relative)
        if fs.exists(kyuafile_absolute) then
            include(kyuafile_relative)
        end
    end
end
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































































































Deleted share/experiments/gauntlet/freebsd/Justfile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
help:
  just -l

[no-cd]
init:
  #!/bin/sh
  set -e
  : ${BSDTESTS:?}
  bin=$(basename $(pwd))
  bindir=$(basename $(dirname $(pwd)))
  cat $BSDTESTS/$bindir/$bin/Kyuafile |
    grep -v -- '-- Automatically generated' |
    sed -e '2,$b' -e '/^$/d' |
    sed -e 's/^test_suite("FreeBSD")/test_suite("gauntlet")/' \
    > Kyuafile
  tests=$(grep -E -o 'atf_test_program\{name="([[:alpha:]]|_)*"' Kyuafile | sed -E -e 's/.*"(.*)".*/\1/')
  for t in $tests; do
    cat > ${t}_ref <<EOF
  #! /usr/bin/env atf-sh
  set -e

  . \${BSDTESTS}/$bindir/$bin/$t
  EOF
    chmod +x ${t}_ref
    sed -i '' -e "s/atf_test_program{name=\"$t\"/atf_test_program{name=\"${t}_ref\"/" Kyuafile
  done

[no-cd]
impl name:
  #!/bin/sh
  set -e
  name="{{name}}"
  grep -E 'atf_test_program\{name="([[:alpha:]]|_)*_ref"' Kyuafile  | sed -E "s/(.*\{name=\")(([[:alpha:]]|_)*)_ref\"(.*)/\1\2_${name}\"\4/" >> Kyuafile

  for f in *_ref; do
    newname=$(echo $f | sed -E -e "s/(.*)_ref$/\1_$name/")
    cat $f | sed -e '0,$b' -e "/^$/a\\
  PATH=\$(realpath \$(atf_get_srcdir))/$name/_build:\$PATH" > $newname
  chmod +x $newname
  done
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































































Deleted share/experiments/gauntlet/freebsd/Kyuafile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
-- $FreeBSD$
--
-- Copyright 2011 Google Inc.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright
--   notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
--   notice, this list of conditions and the following disclaimer in the
--   documentation and/or other materials provided with the distribution.
-- * Neither the name of Google Inc. nor the names of its contributors
--   may be used to endorse or promote products derived from this software
--   without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-- Automatically recurses into any subdirectory that holds a Kyuafile.
-- As such, this Kyuafile is suitable for installation into the root of
-- the tests hierarchy as well as into any other subdirectory that needs
-- "auto-discovery" of tests.
--
-- This file is based on the Kyuafile.top sample file distributed in the
-- kyua-cli package.

syntax(2)

local directory = fs.dirname(current_kyuafile())
for file in fs.files(directory) do
    if file == "." or file == ".." then
        -- Skip these special entries.
    else
        local kyuafile_relative = fs.join(file, "Kyuafile")
        local kyuafile_absolute = fs.join(directory, kyuafile_relative)
        if fs.exists(kyuafile_absolute) then
            include(kyuafile_relative)
        end
    end
end
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































































































Deleted share/experiments/gauntlet/freebsd/bin/Kyuafile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
-- $FreeBSD$
--
-- Copyright 2011 Google Inc.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright
--   notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
--   notice, this list of conditions and the following disclaimer in the
--   documentation and/or other materials provided with the distribution.
-- * Neither the name of Google Inc. nor the names of its contributors
--   may be used to endorse or promote products derived from this software
--   without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-- Automatically recurses into any subdirectory that holds a Kyuafile.
-- As such, this Kyuafile is suitable for installation into the root of
-- the tests hierarchy as well as into any other subdirectory that needs
-- "auto-discovery" of tests.
--
-- This file is based on the Kyuafile.top sample file distributed in the
-- kyua-cli package.

syntax(2)

local directory = fs.dirname(current_kyuafile())
for file in fs.files(directory) do
    if file == "." or file == ".." then
        -- Skip these special entries.
    else
        local kyuafile_relative = fs.join(file, "Kyuafile")
        local kyuafile_absolute = fs.join(directory, kyuafile_relative)
        if fs.exists(kyuafile_absolute) then
            include(kyuafile_relative)
        end
    end
end
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































































































Deleted share/experiments/gauntlet/freebsd/bin/cp/Kyuafile.

1
2
3
4
5
6
7
8
-- Automatically generated by mk_tests.sh.

syntax(2)

test_suite("gauntlet")

atf_test_program{name="test_cp_test_ref", }
atf_test_program{name="test_cp_test_elixir", }
<
<
<
<
<
<
<
<
















Deleted share/experiments/gauntlet/freebsd/bin/cp/Makefile.

1
2
3
4
5
6
7
8
9
ROOT=		../..
IMPLS=		elixir # rust go

all: sparse

sparse: ${BSDTESTS}/bin/cp/sparse
	cp ${.ALLSRC} ${.TARGET}

.include "${ROOT}/kyua.mk"
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/gauntlet/freebsd/bin/cp/elixir/Makefile.

1
2
3
_build/cp! cp.exs
	@mkdir -p ${.TARGET:H}
	cp ${.ALLSRC} ${.TARGET}
<
<
<






Deleted share/experiments/gauntlet/freebsd/bin/cp/elixir/cp.exs.

1
2
3
#!/usr/bin/env elixir
[from, to] = System.argv()
File.cp(from, to)
<
<
<






Deleted share/experiments/gauntlet/freebsd/bin/cp/test_cp_test_elixir.

1
2
3
4
5
6
7
#! /usr/bin/env atf-sh
# automatically generated by mk_tests.sh
set -e

export LC_ALL=C.UTF-8 # otherwise VM complains about incorrect locale
PATH=$(realpath $(atf_get_srcdir))/elixir/_build:$PATH
. $(atf_get_srcdir)/test_cp_test_ref
<
<
<
<
<
<
<














Deleted share/experiments/gauntlet/freebsd/bin/cp/test_cp_test_ref.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
#! /usr/libexec/atf-sh
#
# SPDX-License-Identifier: BSD-2-Clause-FreeBSD
#
# Copyright (c) 2020 Kyle Evans <kevans@FreeBSD.org>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# $FreeBSD$

check_size()
{
	file=$1
	sz=$2

	atf_check -o inline:"$sz\n" stat -f '%z' $file
}

atf_test_case basic
basic_body()
{
	echo "foo" > bar

	atf_check cp bar baz
	check_size baz 4
}

atf_test_case basic_symlink
basic_symlink_body()
{
	echo "foo" > bar
	ln -s bar baz

	atf_check cp baz foo
	atf_check test '!' -L foo

	atf_check -e inline:"cp: baz and baz are identical (not copied).\n" \
	    -s exit:1 cp baz baz
	atf_check -e inline:"cp: bar and baz are identical (not copied).\n" \
	    -s exit:1 cp baz bar
}

atf_test_case chrdev
chrdev_body()
{
	echo "foo" > bar

	check_size bar 4
	atf_check cp /dev/null trunc
	check_size trunc 0
	atf_check cp bar trunc
	check_size trunc 4
	atf_check cp /dev/null trunc
	check_size trunc 0
}

atf_test_case matching_srctgt
matching_srctgt_body()
{

	# PR235438: `cp -R foo foo` would previously infinitely recurse and
	# eventually error out.
	mkdir foo
	echo "qux" > foo/bar
	cp foo/bar foo/zoo

	atf_check cp -R foo foo
	atf_check -o inline:"qux\n" cat foo/foo/bar
	atf_check -o inline:"qux\n" cat foo/foo/zoo
	atf_check -e not-empty -s not-exit:0 stat foo/foo/foo
}

atf_test_case matching_srctgt_contained
matching_srctgt_contained_body()
{

	# Let's do the same thing, except we'll try to recursively copy foo into
	# one of its subdirectories.
	mkdir foo
	ln -s foo coo
	echo "qux" > foo/bar
	mkdir foo/moo
	touch foo/moo/roo
	cp foo/bar foo/zoo

	atf_check cp -R foo foo/moo
	atf_check cp -RH coo foo/moo
	atf_check -o inline:"qux\n" cat foo/moo/foo/bar
	atf_check -o inline:"qux\n" cat foo/moo/coo/bar
	atf_check -o inline:"qux\n" cat foo/moo/foo/zoo
	atf_check -o inline:"qux\n" cat foo/moo/coo/zoo

	# We should have copied the contents of foo/moo before foo, coo started
	# getting copied in.
	atf_check -o not-empty stat foo/moo/foo/moo/roo
	atf_check -o not-empty stat foo/moo/coo/moo/roo
	atf_check -e not-empty -s not-exit:0 stat foo/moo/foo/moo/foo
	atf_check -e not-empty -s not-exit:0 stat foo/moo/coo/moo/coo
}

atf_test_case matching_srctgt_link
matching_srctgt_link_body()
{

	mkdir foo
	echo "qux" > foo/bar
	cp foo/bar foo/zoo

	atf_check ln -s foo roo
	atf_check cp -RH roo foo
	atf_check -o inline:"qux\n" cat foo/roo/bar
	atf_check -o inline:"qux\n" cat foo/roo/zoo
}

atf_test_case matching_srctgt_nonexistent
matching_srctgt_nonexistent_body()
{

	# We'll copy foo to a nonexistent subdirectory; ideally, we would
	# skip just the directory and end up with a layout like;
	#
	# foo/
	#     bar
	#     dne/
	#         bar
	#         zoo
	#     zoo
	#
	mkdir foo
	echo "qux" > foo/bar
	cp foo/bar foo/zoo

	atf_check cp -R foo foo/dne
	atf_check -o inline:"qux\n" cat foo/dne/bar
	atf_check -o inline:"qux\n" cat foo/dne/zoo
	atf_check -e not-empty -s not-exit:0 stat foo/dne/foo
}

recursive_link_setup()
{
	extra_cpflag=$1

	mkdir -p foo/bar
	ln -s bar foo/baz

	mkdir foo-mirror
	eval "cp -R $extra_cpflag foo foo-mirror"
}

atf_test_case recursive_link_dflt
recursive_link_dflt_body()
{
	recursive_link_setup

	# -P is the default, so this should work and preserve the link.
	atf_check cp -R foo foo-mirror
	atf_check test -L foo-mirror/foo/baz
}

atf_test_case recursive_link_Hflag
recursive_link_Hflag_body()
{
	recursive_link_setup

	# -H will not follow either, so this should also work and preserve the
	# link.
	atf_check cp -RH foo foo-mirror
	atf_check test -L foo-mirror/foo/baz
}

atf_test_case recursive_link_Lflag
recursive_link_Lflag_body()
{
	recursive_link_setup -L

	# -L will work, but foo/baz ends up expanded to a directory.
	atf_check test -d foo-mirror/foo/baz -a \
	    '(' ! -L foo-mirror/foo/baz ')'
	atf_check cp -RL foo foo-mirror
	atf_check test -d foo-mirror/foo/baz -a \
	    '(' ! -L foo-mirror/foo/baz ')'
}

file_is_sparse()
{
	atf_check ${0%/*}/sparse "$1"
}

files_are_equal()
{
	atf_check test "$(stat -f "%d %i" "$1")" != "$(stat -f "%d %i" "$2")"
	atf_check cmp "$1" "$2"
}

atf_test_case sparse_leading_hole
sparse_leading_hole_body()
{
	# A 16-megabyte hole followed by one megabyte of data
	truncate -s 16M foo
	seq -f%015g 65536 >>foo
	file_is_sparse foo

	atf_check cp foo bar
	files_are_equal foo bar
	file_is_sparse bar
}

atf_test_case sparse_multiple_holes
sparse_multiple_holes_body()
{
	# Three one-megabyte blocks of data preceded, separated, and
	# followed by 16-megabyte holes
	truncate -s 16M foo
	seq -f%015g 65536 >>foo
	truncate -s 33M foo
	seq -f%015g 65536 >>foo
	truncate -s 50M foo
	seq -f%015g 65536 >>foo
	truncate -s 67M foo
	file_is_sparse foo

	atf_check cp foo bar
	files_are_equal foo bar
	file_is_sparse bar
}

atf_test_case sparse_only_hole
sparse_only_hole_body()
{
	# A 16-megabyte hole
	truncate -s 16M foo
	file_is_sparse foo

	atf_check cp foo bar
	files_are_equal foo bar
	file_is_sparse bar
}

atf_test_case sparse_to_dev
sparse_to_dev_body()
{
	# Three one-megabyte blocks of data preceded, separated, and
	# followed by 16-megabyte holes
	truncate -s 16M foo
	seq -f%015g 65536 >>foo
	truncate -s 33M foo
	seq -f%015g 65536 >>foo
	truncate -s 50M foo
	seq -f%015g 65536 >>foo
	truncate -s 67M foo
	file_is_sparse foo

	atf_check -o file:foo cp foo /dev/stdout
}

atf_test_case sparse_trailing_hole
sparse_trailing_hole_body()
{
	# One megabyte of data followed by a 16-megabyte hole
	seq -f%015g 65536 >foo
	truncate -s 17M foo
	file_is_sparse foo

	atf_check cp foo bar
	files_are_equal foo bar
	file_is_sparse bar
}

atf_test_case standalone_Pflag
standalone_Pflag_body()
{
	echo "foo" > bar
	ln -s bar foo

	atf_check cp -P foo baz
	atf_check -o inline:'Symbolic Link\n' stat -f %SHT baz
}

atf_init_test_cases()
{
	atf_add_test_case basic
	atf_add_test_case basic_symlink
	atf_add_test_case chrdev
	atf_add_test_case matching_srctgt
	atf_add_test_case matching_srctgt_contained
	atf_add_test_case matching_srctgt_link
	atf_add_test_case matching_srctgt_nonexistent
	atf_add_test_case recursive_link_dflt
	atf_add_test_case recursive_link_Hflag
	atf_add_test_case recursive_link_Lflag
	atf_add_test_case sparse_leading_hole
	atf_add_test_case sparse_multiple_holes
	atf_add_test_case sparse_only_hole
	atf_add_test_case sparse_to_dev
	atf_add_test_case sparse_trailing_hole
	atf_add_test_case standalone_Pflag
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
























































































































































































































































































































































































































































































































































































































































Deleted share/experiments/gauntlet/freebsd/kyua.mk.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
BINNAME=	${.CURDIR:T}
BINDIR=		${.CURDIR:H:T}
FULLBIN=	${BINDIR}/${BINNAME}
MAKEFILES!=	ls */Makefile

.MAIN: all

.PHONY: all clean

all:
clean:

Kyuafile!
	rm -f test_*
	${ROOT}/mk_kyua.sh ${FULLBIN} "${IMPLS}"

.for f in ${MAKEFILES}
all: ${f}
${f}!
	${MAKE} -C ${f:H}
.endfor

.for i in ${IMPLS}
clean: clean-${i}
clean-${i}:
	rm -rf ${i}/_build
	mkdir ${i}/_build
	printf "#!/bin/sh\nfalse" > ${i}/_build/${BINNAME}
	chmod +x ${i}/_build/${BINNAME}
.endfor
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




























































Deleted share/experiments/gauntlet/freebsd/mk_kyua.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
#!/bin/sh
# mk_kyua.sh — generate a gauntlet Kyuafile plus per-implementation test
# wrappers from the installed FreeBSD test suite.
#
# Usage: BSDTESTS=/usr/tests mk_kyua.sh <category/binary> "<impl1 impl2 ...>"
set -e

: ${BSDTESTS:?}
fullbin=$1
impls=$2

# Copy the FreeBSD Kyuafile, rebranding it for the gauntlet test suite.
sed -e 's/generated by bsd.test.mk/generated by mk_tests.sh/' \
    -e 's/^test_suite("FreeBSD")/test_suite("gauntlet")/' \
    "$BSDTESTS/$fullbin/Kyuafile" > Kyuafile

# Import each referenced test program as a *_ref reference copy and
# rewrite the Kyuafile entry to point at it.
tests=$(grep -E -o 'atf_test_program\{name="([[:alpha:]]|_)*"' Kyuafile | sed -E -e 's/.*"(.*)".*/\1/')
for t in $tests; do
    testfile=test_${t}_ref
    cp "$BSDTESTS/$fullbin/$t" "$testfile"
    chmod +x "$testfile"
    sed -i '' -e "s/atf_test_program{name=\"$t\"/atf_test_program{name=\"$testfile\"/" Kyuafile
done

# For each implementation, register *_<impl> variants of every *_ref test
# and emit a wrapper that puts that implementation first in PATH.
for i in $impls; do
    grep -E 'atf_test_program\{name="([[:alpha:]]|_)*_ref"' Kyuafile | sed -E "s/(.*\{name=\")(([[:alpha:]]|_)*)_ref\"(.*)/\1\2_${i}\"\4/" >> Kyuafile

    for f in *_ref; do
	newname=$(echo "$f" | sed -E -e "s/(.*)_ref$/\1_$i/")
	# Reset each iteration: previously lcline was never cleared, so the
	# elixir locale line leaked into every impl processed after elixir.
	lcline=""
	if [ "$i" = "elixir" ]; then
	    lcline='export LC_ALL=C.UTF-8 # otherwise VM complains about incorrect locale'
	fi
	cat > "$newname" <<EOF
#! /usr/bin/env atf-sh
# automatically generated by mk_tests.sh
set -e

$lcline
PATH=\$(realpath \$(atf_get_srcdir))/$i/_build:\$PATH
. \$(atf_get_srcdir)/$f
EOF
	chmod +x "$newname"
    done
done
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






















































































Deleted share/experiments/gauntlet/games/Kyuafile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
-- $FreeBSD$
--
-- Copyright 2011 Google Inc.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright
--   notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
--   notice, this list of conditions and the following disclaimer in the
--   documentation and/or other materials provided with the distribution.
-- * Neither the name of Google Inc. nor the names of its contributors
--   may be used to endorse or promote products derived from this software
--   without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-- Automatically recurses into any subdirectory that holds a Kyuafile.
-- As such, this Kyuafile is suitable for installation into the root of
-- the tests hierarchy as well as into any other subdirectory that needs
-- "auto-discovery" of tests.
--
-- This file is based on the Kyuafile.top sample file distributed in the
-- kyua-cli package.

syntax(2)

-- Recurse into every subdirectory that carries its own Kyuafile, skipping
-- the "." and ".." directory entries.
local here = fs.dirname(current_kyuafile())
for entry in fs.files(here) do
    if entry ~= "." and entry ~= ".." then
        local rel = fs.join(entry, "Kyuafile")
        if fs.exists(fs.join(here, rel)) then
            include(rel)
        end
    end
end
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































































































Deleted share/experiments/gauntlet/games/simpleguess/Kyuafile.

1
2
3
4
5
6
-- Kyuafile for the simpleguess game: registers the ATF wrapper script
-- for each implementation under the "gauntlet" test suite.
syntax(2)

test_suite("gauntlet")

atf_test_program{name="test_simpleguess_c"}
atf_test_program{name="test_simpleguess_elixir"}
<
<
<
<
<
<












Deleted share/experiments/gauntlet/games/simpleguess/c/Makefile.

1
2
3
4
5
6
7
8
# Build the C implementation of simpleguess into _build/.
.PHONY: run test

_build/simpleguess: simpleguess.c
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror ${.ALLSRC}

# Build and launch the game interactively.
run: _build/simpleguess
	./${.ALLSRC}
<
<
<
<
<
<
<
<
















Deleted share/experiments/gauntlet/games/simpleguess/c/simpleguess.c.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
#include <stdio.h>
#include <string.h>

/*
 * simpleguess: read a one-character guess from stdin and compare it
 * against the answer supplied as the first command-line argument.
 * Prints "right!" or "wrong :(" after the prompt; exits 0 either way.
 */
int main(int argc, char **argv)
{
  char guess[2];

  /* Previously argv[1] was dereferenced unconditionally, crashing when
   * the answer argument was missing. */
  if (argc < 2) {
    fprintf(stderr, "usage: simpleguess <answer>\n");
    return 1;
  }

  printf("Guess your number: ");
  /* %1s stores at most one non-whitespace char plus the NUL terminator. */
  if (scanf("%1s", guess) != 1) {
    fprintf(stderr, "no guess read\n");
    return 1;
  }
  if (strcmp(guess, argv[1]) == 0) {
    printf("right!\n");
  } else {
    printf("wrong :(\n");
  }
  return 0;
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























Deleted share/experiments/gauntlet/games/simpleguess/elixir/Makefile.

1
2
3
# "Build" the Elixir implementation: copy the script verbatim into _build/
# under the shared binary name ('!' forces the copy on every run).
_build/simpleguess! simpleguess.exs
	@mkdir -p ${.TARGET:H}
	cp ${.ALLSRC} ${.TARGET}
<
<
<






Deleted share/experiments/gauntlet/games/simpleguess/elixir/simpleguess.exs.

1
2
3
4
5
6
7
8
#!/usr/bin/env elixir
# Compare a guess read from stdin against the answer given as argv[0].
[correct | _rest] = System.argv()
response = "Guess your number: " |> IO.gets() |> String.trim()
IO.puts(if response == correct, do: "right!", else: "wrong :(")
<
<
<
<
<
<
<
<
















Deleted share/experiments/gauntlet/games/simpleguess/test_simpleguess_c.

1
2
3
4
5
#! /usr/libexec/atf-sh
set -e

# Run the shared simpleguess test cases against the C build by putting
# its _build directory first in PATH before sourcing them.
PATH=$(realpath $(atf_get_srcdir))/c/_build:$PATH
. $(atf_get_srcdir)/test_simpleguess_shared
<
<
<
<
<










Deleted share/experiments/gauntlet/games/simpleguess/test_simpleguess_elixir.

1
2
3
4
5
6
#! /usr/libexec/atf-sh
set -e

# Run the shared simpleguess test cases against the Elixir build.
export LC_ALL=C.UTF-8 # otherwise VM complains about incorrect locale
PATH=$(realpath $(atf_get_srcdir))/elixir/_build:$PATH
. $(atf_get_srcdir)/test_simpleguess_shared
<
<
<
<
<
<












Deleted share/experiments/gauntlet/games/simpleguess/test_simpleguess_shared.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
# Shared ATF test cases for every simpleguess implementation. The sourcing
# wrapper must place the implementation's binary first in PATH so that
# plain 'simpleguess' resolves to the build under test.
atf_init_test_cases()
{
    atf_add_test_case 1_right
    atf_add_test_case 2_wrong
    # wrong then right
}

# Guessing the correct answer prints "right!" after the prompt.
atf_test_case 1_right
1_right_body()
{
    cat > input <<EOF
3
EOF
    atf_check -o inline:"Guess your number: right!\n" simpleguess 3 < input
}

# Guessing incorrectly prints "wrong :(" after the prompt.
atf_test_case 2_wrong
2_wrong_body()
{
    cat > input <<EOF
1
EOF
    atf_check -o inline:"Guess your number: wrong :(\n" simpleguess 3 < input
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































Deleted share/experiments/inventory/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
# Run the shared shtk test suite against one implementation, selected by
# IMPL (defaults to crystal); its _build dir is prepended to PATH so the
# tests pick up the binary under test.
IMPL?=		crystal
IMPL_DIR=	${.CURDIR}/inventory-${IMPL}
PATH:=		${IMPL_DIR}/_build:${PATH}
.export PATH

.PHONY: test build

build:
	cd ${IMPL_DIR} && make

test: build _build/inventory_test
	./_build/inventory_test

# Compile the shtk test script into a standalone runner.
_build/inventory_test: inventory_test.sh
	@mkdir -p ${.TARGET:H}
	shtk build -o ${.TARGET} -m shtk_unittest_main inventory_test.sh
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
































Deleted share/experiments/inventory/inventory-crystal/.editorconfig.

1
2
3
4
5
6
7
8
9
root = true

[*.cr]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/inventory/inventory-crystal/.gitignore.

1
2
3
4
5
/docs/
/lib/
/bin/
/.shards/
*.dwarf
<
<
<
<
<










Deleted share/experiments/inventory/inventory-crystal/LICENSE.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
MIT License

Copyright (c) 2024 Pat Maddox <pat@patmaddox.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<










































Deleted share/experiments/inventory/inventory-crystal/Makefile.

1
2
3
4
5
6
# Build the Crystal implementation; rebuilds whenever any .cr source changes.
.PHONY: build
CR_FILES!= find src -name '*.cr'

# 'build' was declared .PHONY but had no rule — the parent Makefile's bare
# 'make' only worked because the binary rule happened to come first.
build: _build/inventory

_build/inventory: ${CR_FILES}
	@mkdir -p _build
	crystal build -o _build/inventory src/inventory.cr
<
<
<
<
<
<












Deleted share/experiments/inventory/inventory-crystal/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
# inventory

TODO: Write a description here

## Installation

TODO: Write installation instructions here

## Usage

TODO: Write usage instructions here

## Development

TODO: Write development instructions here

## Contributing

1. Fork it (<https://github.com/your-github-user/inventory-crystal/fork>)
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create a new Pull Request

## Contributors

- [Pat Maddox](https://github.com/your-github-user) - creator and maintainer
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






















































Deleted share/experiments/inventory/inventory-crystal/shard.yml.

1
2
3
4
5
6
7
8
9
10
11
12
13
name: inventory
version: 0.1.0

authors:
  - Pat Maddox <pat@patmaddox.com>

targets:
  inventory:
    main: src/inventory.cr

crystal: '>= 1.10.1'

license: MIT
<
<
<
<
<
<
<
<
<
<
<
<
<


























Deleted share/experiments/inventory/inventory-crystal/spec/inventory_spec.cr.

1
2
3
4
5
6
7
8
9
require "./spec_helper"

describe Inventory do
  # TODO: Write tests

  # NOTE(review): placeholder generated by `crystal init`; this spec fails
  # on purpose until real tests replace it.
  it "works" do
    false.should eq(true)
  end
end
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/inventory/inventory-crystal/spec/spec_helper.cr.

1
2
require "spec"
require "../src/inventory"
<
<




Deleted share/experiments/inventory/inventory-crystal/src/inventory.cr.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
require "./runner"

Inventory::Runner.new.run

# Persistent, line-oriented item store backed by the .inventory file in
# the current working directory.
class Inventory
  @filename = ".inventory"   # store location, relative to cwd
  @items = [] of String      # one item per line, insertion order
  @dirty = false             # set when a mutation needs to be persisted

  # Loads the store, yields it for mutation, then writes it back only if
  # something actually changed.
  def initialize(&)
    read
    yield self
    write if @dirty
  end

  # Prints all items, one per line; prints nothing when the store is empty.
  def show
    puts @items.join("\n") if @items.any?
  end

  # Adds an item unless it is already present (set semantics).
  def set(item : String)
    unless @items.includes?(item)
      @items << item
      @dirty = true
    end
  end

  # Removes an item; terminates the process with status 1 when absent
  # (nothing was mutated, so there is nothing to persist).
  def delete(item : String)
    if @items.delete(item)
      @dirty = true
    else
      STDERR.puts "E: no item '#{item}'"
      exit 1
    end
  end

  private def read
    @items = File.read_lines(@filename) if File.exists?(@filename)
  end

  private def write
    if @dirty
      File.open(@filename, "w") do |f|
        @items.each {|i| f.puts(i) }
      end
      @dirty = false
    end
  end
end
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
































































































Deleted share/experiments/inventory/inventory-crystal/src/runner.cr.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
require "option_parser"

class Inventory
  # Subcommands accepted on the command line.
  enum Command
    Show
    Set
    Delete
    Unknown
  end

  # Parses ARGV and dispatches to the matching Inventory operation.
  class Runner
    @parser : OptionParser
    @command = Command::Unknown
    @item : String|Nil

    def initialize
      @parser = init_parser
      @parser.parse
    end

    # Executes the parsed command; unknown input prints usage and exits 1.
    def run
      item = @item
      case @command
      in Command::Show
        Inventory.new {|i| i.show }
      in Command::Set
        Inventory.new {|i| i.set(item) } if item
      in Command::Delete
        Inventory.new {|i| i.delete(item) } if item
      in Command::Unknown
        puts @parser
        exit(1)
      end
    end

    private def init_parser
      OptionParser.new do |parser|
        parser.banner = "Usage: inventory [subcommand] [arguments]"

        parser.on("show", "Show the inventory") do
          @command = Command::Show
          parser.banner = "Usage: inventory show"
        end

        # NOTE(review): ARGV[-1] is the last argument; when 'set'/'del' is
        # given without an item this picks up the subcommand itself —
        # confirm whether that is intended.
        parser.on("set", "Set an item in inventory") do
          @command = Command::Set
          parser.banner = "Usage: inventory set [item]"
          @item = ARGV[-1]
        end

        parser.on("del", "Delete an item in inventory") do
          @command = Command::Delete
          parser.banner = "Usage: inventory del [item]"
          @item = ARGV[-1]
        end
      end
    end
  end
end
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






















































































































Deleted share/experiments/inventory/inventory_test.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
# shtk unit tests for the 'inventory' CLI. The implementation under test
# must be first in PATH (arranged by the parent Makefile); shtk runs each
# test in a scratch directory, so .inventory starts absent.
shtk_import unittest

shtk_unittest_add_test empty
empty_test() {
    assert_command inventory show
    # 'show' on an empty store prints nothing and must not create the file.
    assert_command test ! -f .inventory
}

shtk_unittest_add_test one_item
one_item_test() {
    assert_command inventory set pants
    assert_command -o inline:"pants\n" inventory show
}

shtk_unittest_add_test two_items
two_items_test() {
    assert_command inventory set pants
    assert_command inventory set shirt
    assert_command -o inline:"pants\nshirt\n" inventory show
}

# Re-adding an existing item is a no-op (set semantics, order preserved).
shtk_unittest_add_test re_add
re_add_test() {
    assert_command inventory set pants
    assert_command inventory set shirt
    assert_command inventory set pants
    assert_command -o inline:"pants\nshirt\n" inventory show
}

shtk_unittest_add_test delete
delete_test() {
    assert_command inventory set pants
    assert_command inventory set shirt
    assert_command inventory del pants
    assert_command -o inline:"shirt\n" inventory show
}

# Deleting a missing item fails with status 1 and a diagnostic on stderr.
shtk_unittest_add_test delete_missing
delete_missing_test() {
    assert_command -s 1 -e inline:"E: no item 'pants'\n" inventory del pants
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































































Deleted share/experiments/jail-networking/epair.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
#!/bin/sh
# epair.sh — experiment: connect a VNET jail to the host via an epair
# interface pair and NAT it out through pf. Must run as root.
set -e

jail_name="myjail"
host_iface=host_if            # host-side end of the epair
jail_iface=${jail_name}_if    # jail-side end, handed to the jail's vnet
host_ip=192.168.4.1
jail_ip=192.168.4.2
netmask=24

usage() {
    # Emit the supported subcommands in one quoted here-doc.
    cat <<'EOF'
Usage:
epair.sh start
epair.sh stop
epair.sh test
epair.sh pf # display current pf nat and rules
EOF
}

start() {
    start_ifconfig
    start_jail
    start_pf
}

stop() {
    stop_pf
    stop_jail
    stop_ifconfig
}

# Create an epair, rename both ends, and assign the host side its address.
# Requires root; the jail side stays unconfigured until the jail starts.
start_ifconfig() {
    iface_a=$(ifconfig epair create up)
    # The kernel names the pair Xa/Xb; derive the b side from the a side.
    iface_b=$(echo $iface_a | sed 's/a$/b/')
    ifconfig $iface_a name $host_iface > /dev/null
    ifconfig $iface_b name $jail_iface > /dev/null

    ifconfig $host_iface inet ${host_ip}/${netmask}
}

stop_ifconfig() {
    ifconfig $host_iface destroy
}

start_jail() {
    jail -c \
	 name=$jail_name \
	 persist \
	 vnet \
	 vnet.interface=$jail_iface

    jexec $jail_name ifconfig lo0 up
    jexec $jail_name ifconfig $jail_iface inet ${jail_ip}/${netmask}
    jexec $jail_name route add default $host_ip > /dev/null
}

stop_jail() {
    jail -r $jail_name
}

start_pf() {
    pfctl -f pf.conf
}

stop_pf() {
    pfctl -f /etc/pf.conf
}

pf() {
    pfctl -s nat
    pfctl -s rules
}

# Connectivity checks between host, jail, default gateway, and 8.8.8.8.
# NOTE(review): this shadows the sh 'test' builtin for the rest of the
# script; [ ... ] still works, but a bare 'test' call would hit this.
test() {
    echo "=== host: $host_ip"
    echo "=== jail: $jail_ip"

    # One probe per target, 1s timeout, quiet output; set -e aborts on
    # the first unreachable hop.
    ping="ping -q -W 1 -c 1"

    echo "=== ping host to self:"
    $ping $host_ip
    echo

    echo "=== ping host to jail:"
    $ping $jail_ip
    echo

    echo "=== ping jail to self:"
    jexec $jail_name $ping $jail_ip
    echo

    echo "=== ping jail to host"
    jexec $jail_name $ping $host_ip
    echo

    gateway=$(route -n get default | grep gateway | awk '{print $2}')
    echo "=== gateway: $gateway"
    echo "=== ping jail to gateway:"
    jexec $jail_name $ping $gateway
    echo

    echo "=== ping jail to 8.8.8.8:"
    jexec $jail_name $ping 8.8.8.8
    echo

    echo "OK"
}

# Dispatch to a known subcommand only. Previously any first argument was
# executed verbatim ($command), which would run arbitrary commands or
# internal helper functions by name.
case "${1:-usage}" in
    start|stop|test|pf|usage)
	"${1:-usage}"
	;;
    *)
	usage
	exit 1
	;;
esac
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


























































































































































































































Deleted share/experiments/jail-networking/pf.conf.

1
2
3
4
5
6
7
8
9
10
11
12
13
# pf rules for the epair jail experiment: NAT the jail network out the
# external interface and pass/log all jail traffic for debugging.
ext_if = "em0"
jail_net = "192.168.4.0/24"

set skip on lo
scrub in

# Rewrite jail-network sources to the external interface's first address.
nat log (all) on $ext_if from $jail_net -> ($ext_if:0)

pass out
pass in

pass log (all) from $jail_net
pass log (all) to $jail_net
<
<
<
<
<
<
<
<
<
<
<
<
<


























Deleted share/experiments/jail-proxy/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
# FreeBSD Jail Proxy

Inspired by FreeBSD forum thread: https://forums.freebsd.org/threads/isolating-host-machine-sending-traffic-through-a-jail-proxy.90070/

Setup:

- host machine
- one proxy jail
- other jails connect to internet through proxy jail

Config:

- host machine NAT to external network
- bridge interface on host
- jails set defaultgateway to proxy

## Issues

With this setup and pf.conf, packets going from jail -> proxy -> host don't get NAT.

This is evident by logging packets:

```
 00:00:07.022015 rule 2/0(match): pass in on proxy-bridge: 192.168.4.1 > 1.1.1.1: ICMP echo request, id 64483, seq 0, length 64
 00:00:00.000022 rule 0/0(match): nat out on em0: 192.168.1.126 > 1.1.1.1: ICMP echo request, id 42665, seq 0, length 64
 00:00:00.014273 rule 0/0(match): nat in on em0: 1.1.1.1 > 192.168.4.1: ICMP echo reply, id 64483, seq 0, length 64
 00:00:00.000003 rule 2/0(match): pass out on proxy-bridge: 1.1.1.1 > 192.168.4.1: ICMP echo reply, id 64483, seq 0, length 64

 00:00:12.705671 rule 2/0(match): pass in on epair11a: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000013 rule 2/0(match): pass in on proxy-bridge: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000007 rule 2/0(match): pass out on proxy-bridge: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000003 rule 2/0(match): pass out on epair10a: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000030 rule 2/0(match): pass in on proxy-bridge: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000004 rule 2/0(match): pass out on em0: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
```
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































Deleted share/experiments/jail-proxy/jail.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
#!/bin/sh
# jail-proxy experiment: build a bridge, attach the host and a "proxy"
# VNET jail to it, and smoke-test connectivity. Must run as root; the
# ifconfig/jail steps are order-dependent.
set -e

# Bridge that all epair a-sides hang off (idempotent).
if ! ifconfig proxy-bridge > /dev/null 2>&1; then
    ifconfig bridge create name proxy-bridge up
fi

# Host's leg onto the bridge: a-side gets the host address, b-side joins
# the bridge (idempotent).
if ! ifconfig epair254a > /dev/null 2>&1; then
    ifconfig epair254 create up
    ifconfig epair254a inet 192.168.4.254/24
    ifconfig proxy-bridge addm epair254b
    ifconfig epair254b up
fi

# Proxy jail's leg: a-side joins the bridge, b-side goes into the jail.
if ! ifconfig epair10a > /dev/null 2>&1; then
    ifconfig epair10 create up
    ifconfig proxy-bridge addm epair10a
fi

jail -c \
     name=proxy \
     persist \
     vnet \
     vnet.interface=epair10b

jexec proxy ifconfig lo0 up
jexec proxy ifconfig epair10b inet 192.168.4.1/24 up
jexec proxy route add default 192.168.4.254 # host
# Let the proxy jail forward packets for the other jails.
jexec proxy sysctl net.inet.ip.forwarding=1

# Second client jail kept for reference; disabled while debugging NAT
# (see README).
#if ! ifconfig epair11a > /dev/null 2>&1; then
#    ifconfig epair11 create up
#    ifconfig proxy-bridge addm epair11a
#fi
#
#jail -c \
#     name=jail_11 \
#     persist \
#     vnet \
#     vnet.interface=epair11b
#
#jexec jail_11 ifconfig epair11b inet 192.168.4.11/24 up
#jexec jail_11 route add default 192.168.4.1 # proxy

# tests
ping="ping -q -c 1"
host=192.168.4.254
proxy=192.168.4.1

echo "host: $host"
echo "proxy: $proxy"

echo -n "ping host to self..."
$ping $host > /dev/null
echo "OK"

echo -n "ping host to proxy..."
$ping $proxy > /dev/null
echo "OK"

echo -n "ping proxy to self..."
jexec proxy $ping $proxy > /dev/null
echo "OK"

echo -n "ping proxy to host..."
jexec proxy $ping $host > /dev/null
echo "OK"
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































































































Deleted share/experiments/jail-proxy/pf.conf.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
ext_if = "em0"
proxy_if = "proxy-bridge"
proxy_net = "192.168.4.0/24"

#set skip on lo
#scrub in

#nat log (all) on $ext_if from $proxy_net -> ($ext_if:0)
nat log (all) on $ext_if to 1.1.1.1 -> ($ext_if:0)
#nat on $proxy_if from ! 192.168.4.1 to ! $proxy_if -> ($ext_if:0)

pass out
#block in
pass in

pass log (all) from $proxy_net
pass log (all) to $proxy_net
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































Deleted share/experiments/make-jails/basic/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
# Jail lifecycle driver: fetch/extract a FreeBSD base, then start/stop/
# login to the jail defined in ${CONF}.
# make the base dir
# extract base.txz
# start it
# stop it
# restart it
# clean up base
# copy config files
# destroy
NAME=		basic
ROOT=		/jails/${NAME}
DISTDIR=	${.CURDIR}/../dist
BASE.TXZ=	${DISTDIR}/13.2-RELEASE-base.txz
BASE_DOWNLOAD=	https://download.freebsd.org/releases/amd64/13.2-RELEASE/base.txz
#IP=		192.168.2.3/24
SCRIPTS=	${.CURDIR}/../scripts
IFACE?=		${NAME}
CONF=		${NAME}.conf

.MAIN: all
.PHONY: all login start stop restart

all: ${ROOT} ${ROOT}/COPYRIGHT

${ROOT}:
	mkdir ${ROOT}

# COPYRIGHT doubles as the "base extracted" stamp file.
${ROOT}/COPYRIGHT: ${BASE.TXZ}
	tar -C ${ROOT} -xf ${BASE.TXZ}
	touch ${.TARGET}

${BASE.TXZ}:
	@mkdir -p ${DISTDIR}
	fetch -q -o ${BASE.TXZ} ${BASE_DOWNLOAD}

start:
	jail -f ${CONF} -c ${NAME}

stop:
	jail -f ${CONF} -r ${NAME}

restart: stop start

login:
	jexec ${NAME} login -f root
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
























































































Deleted share/experiments/make-jails/basic/basic.conf.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
# jail.conf(5) for the 'basic' jail: VNET jail whose epair is created by
# the jlif helper at prepare time and destroyed at release.
basic {
  $ip=192.168.2.3/24;
  path=/jails/${name};
  vnet;
  host.hostname=${name};
  exec.clean;
  exec.prepare="~patmaddox/jails/scripts/jlif ${name} ${ip}";
  exec.start="sh /etc/rc";
  exec.stop="sh /etc/rc.shutdown jail";
  exec.release="ifconfig ${name}a destroy";
  mount.devfs;
  allow.mount;
  allow.mount.devfs;
  allow.mount.tmpfs;
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























Deleted share/experiments/make-jails/scripts/jlif.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#!/bin/sh
# jlif — create an epair for a jail and attach the host side to the
# 'jails' bridge. Usage: jlif <name> <ip>
main()
{
    name=$1; shift
    # Fix: after the first shift the IP is $1, not $2 — the old
    # 'ip=$2; shift' always left ip empty.
    ip=$1; shift
    # NOTE(review): $ip is currently unused here; the address appears to
    # be assigned elsewhere (jail.conf exec hooks) — confirm.

    iface=${name}

    aname=$(ifconfig epair create up)
    bname=$(echo ${aname} | sed -e 's/a$/b/')

    ifconfig jails addm ${aname}
    ifconfig ${aname} name ${iface}a
    ifconfig ${bname} name ${iface}b
}

main "${@}"
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































Deleted share/experiments/man-compare/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
# Render the same markdown man page through several toolchains so the
# outputs can be compared side by side (PDF target disabled by default).
all: _build/foo-pandoc.1 _build/foo-md2man.1 _build/foo-pandoc.1.html # _build/foo-pandoc.1.pdf

_build/foo-pandoc.1: foo.1.md
	@mkdir -p ${.TARGET:H}
	pandoc --standalone --to man ${.ALLSRC} -o ${.TARGET}

_build/foo-md2man.1: foo.1.md
	@mkdir -p ${.TARGET:H}
	go-md2man -in ${.ALLSRC} -out ${.TARGET}

_build/foo-pandoc.1.html: foo.1.md
	@mkdir -p ${.TARGET:H}
	pandoc --standalone --to html ${.ALLSRC} -o ${.TARGET}

_build/foo-pandoc.1.pdf: foo.1.md
	@mkdir -p ${.TARGET:H}
	pandoc --standalone --to pdf ${.ALLSRC} -o ${.TARGET}

<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































Deleted share/experiments/man-compare/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
# Comparing man page authoring tools

Man pages are super useful, but I find the writing format really ugly.
There has to be a better way... right?
This is my exploration into different tools for authoring man pages.

Possible tools:

- straight up mdoc / mandoc
- [org-mode](https://orgmode.org/worg/org-tutorials/org-e-man-documentation.html) (exports to groff)
- docbook
  - [docbook2mdoc](https://mandoc.bsd.lv/docbook2mdoc/)
  - [laserhammer](https://github.com/trasz/laserhammer)
- POD
  - [pod2mdoc](https://mandoc.bsd.lv/pod2mdoc/)
- asciidoc
  - [asciidoctor](https://docs.asciidoctor.org/asciidoctor/latest/manpage-backend/)
- markdown to man
  - [markdown2man](https://manpages.ubuntu.com/manpages/lunar/man1/markdown2man.1.html)
  - pandoc - lots of blog posts about this, seems maybe good
  - [ronn](https://github.com/rtomayko/ronn)
  - [md2man](https://github.com/sunaku/md2man)
  - [go-md2man](https://github.com/cpuguy83/go-md2man)
  - [um](https://github.com/sinclairtarget/um) - tool to maintain your own copies of man pages
  
in ports:

- docbook2mdoc
- emacs
- pod2mdoc
- asciidoc
- pandoc
- go-md2man
- laserhammer (no maintainer)

tried, rejected:

- ronn

not in ports:

- asciidoctor
- markdown2man
- md2man
- um

Keyword I'm looking for is "mandoc converter" or "mdoc converter"

Questions:

- how can I make emacs read a specific file as a man page?

Notes:

- freebsd uses `mdoc(7)` format, rendered by `mandoc(1)`
- `mandoc(1)` outputs to ASCII, HTML, PDF
- mdoc is a pretty big language
- [blog post about taking notes in man format](https://gpanders.com/blog/write-your-own-man-pages/)
  - actually about markdown, but rendering to man
  
pandoc:

- https://gabmus.org/posts/man_pages_with_markdown_and_pandoc/
- https://gpanders.com/blog/write-your-own-man-pages/
- https://www.dlab.ninja/2021/10/how-to-write-manpages-with-markdown-and.html?m=1
- https://eddieantonio.ca/blog/2015/12/18/authoring-manpages-in-markdown-with-pandoc/
- http://jeromebelleman.gitlab.io/posts/publishing/manpages/

Related:

I want good general documentation tools as well, for writing reference manuals / guides.
FreeBSD has an excellent documentation toolchain, as does GNU.
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































































































































Deleted share/experiments/man-compare/foo.1.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
% foo(1) | User Commands
% Pat Maddox

# NAME

foo

# SYNOPSIS

`foo [-bar -baz]`  
`foo [--qux arg1 ...]`

# DESCRIPTION

This is foo.
It is the first command.
You will probably run it.

# ENVIRONMENT

`BAR`
: same as `-bar`

# EXIT STATUS

1
: something went wrong

2
: something went really wrong

3
: run for cover

# EXAMPLES

foo with bar:
: `foo -bar`

foo with qux:
: `foo --qux file1 file2`

foo with many qux:
:
```
foo --qux file1
  file2
  file3
```

# COMPATIBILITY

FreeBSD >= 13.2

# SEE ALSO

bar(1)

# STANDARDS

foo.conf(5)

# HISTORY

Widely considered to be the first program ever written.
Yes, it precedes "hello world".

# BUGS

No way.
I code good.
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<














































































































































Deleted share/experiments/pof/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# Build the pof CLI and its shtk test runner; 'test' runs the suite from
# the test/ directory with the freshly built binary first in PATH.
.PHONY: build test clean

build: _build/pof

test: _build/pof_test _build/pof
	cd test && PATH=${.CURDIR}/_build:$$PATH pof_test

_build/pof: src/pof.sh
	@mkdir -p ${.TARGET:H}
	shtk build -o ${.TARGET} src/pof.sh

_build/pof_test: test/pof_test.sh
	@mkdir -p ${.TARGET:H}
	shtk build -o ${.TARGET} -m shtk_unittest_main test/pof_test.sh

clean:
	rm -rf _build
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































Deleted share/experiments/pof/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# pof - pile of files

A simple, content-addressable key-value store.

Goals:

- lightweight
- clear interface
- trustworthy
- fast enough

Future:

- content-defined chunking
- lock local files - don't need to store a second copy of chunks, and can read directly from a file if requested

## Usage

local:

```sh
pof put file://.pof foobar            # => sha256-abc123
pof get file://.pof sha256-abc123 bar # write to bar
pof get file://.pof sha256-abc123 -   # write to stdout

pof put -c md5 file://.pof foobar  # => md5-def456
```

ssh:

```sh
pof put ssh://nas/.pof foobar            # => sha256-abc123
pof get ssh://nas/.pof sha256-abc123 bar # write to bar
pof get ssh://nas/.pof sha256-abc123 -   # write to stdout
```

## File Format

Inspired by Fossil SCM

A POF archive is an unordered set of artifacts.
Artifacts may either be control artifacts, or content artifacts.
pof infers control artifacts from their internal structure, which are defined as cards.

An artifact card begins with the artifact checksum, followed by an ordered list of chunks, which are themselves artifacts.
Each chunk line includes the checksum, number of bytes, and byte offset in the file.
The final line includes a checksum of the artifact card content up to that point, including a new line.

```
A sha256-abc123 1000
F sha256-def456 100 0
F sha256-ghi789 800 100
F sha256-jkl0ab 100 900
Z sha256-foobar 91
```

In the above example, the `Z sha256-foobar 91` shows the checksum of the preceding content, and the number of bytes.

## Protocol

Chunking files allows nodes to exchange only the chunks that the other side needs. 

put:

1. source generates an artifact card
2. source sends artifact card to target
3. target replies with a list of artifacts it needs
4. source sends a card with artifact contents
5. target replies with empty list of artifacts needed

e.g.

source generates a card to send to target:

```
A sha256-abc123 1000
F sha256-def456 100 0
F sha256-ghi789 800 100
F sha256-jkl0ab 100 900
Z sha256-foobar 91
```

target replies with a list of artifacts it needs:

```
A sha256-abc123
F sha256-ghi789
Z sha256-dogcat 32
```

source sends a card with artifact contents:

```
B sha256-ghi789 800 <bytes>
Z sha256-froggy 28
```

target replies with empty list of artifacts needed:

```
A sha256-abc123
Z sha256-spider 16
```

## notes

While not necessarily implemented using HTTP, the protocol is very similar to REST / resource-oriented.
GET and PUT are the main actions.
GET may result in a success, not found, or redirect to a manifest.
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


























































































































































































































Deleted share/experiments/pof/src/pof.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
shtk_import cli

set -e

# Entry point: parse global options, then dispatch to pof_<command>.
#
# Usage: main [-e] <command> [args...]
#   -e  log responses to stderr instead of stdout (pof_put and
#       receive_file read log_to_stderr via the shell's dynamic
#       scoping of `local`)
main() {
    local log_to_stderr
    local o
    # Leading ':' selects silent mode so OPTARG carries the offending
    # option character into our own error message.
    while getopts ':e' o; do
	case "${o}" in
	    e)
		log_to_stderr="1"
		;;
	    *)
		# was "-${e}": ${e} was never set, so the message showed
		# no option character at all
		shtk_cli_error "Unknown option -${OPTARG}"
		;;
	esac
    done
    # Drop all parsed options in one step; the old `shift` inside the
    # getopts loop desynchronized OPTIND from the argument list.
    shift $((OPTIND - 1))
    local cmd="${1}"; shift
    pof_${cmd} "${@}"
}

# Store a file in a repo.
#
# $1: destination URL ("-" for the stdio protocol, or file://<dir>)
# $2: path of the file to store
# Prints the content checksum ("sha256-<digest>") of the stored file.
pof_put() {
    local protocol
    local repo
    parse_url "${1}"; shift
    local file="${1}"; shift
    # Content-addressed name: sha256sum -q prints only the digest (FreeBSD).
    local sum="sha256-$(sha256sum -q "${file}")"

    case "${protocol}" in
	file)
	    # quote paths so filenames with spaces survive
	    cp "${file}" "${repo}/${sum}"
	    echo "${sum}"
	    ;;
	stdio)
	    transmit_file "${file}" "${sum}"
	    # declare the reply local instead of leaking it globally
	    local response
	    read response
	    if [ "${log_to_stderr}" = "1" ]; then
		echo "${response}" 1>&2
	    else
		echo "${response}"
	    fi
	    ;;
	*)
	    shtk_cli_error "${protocol} is not a valid pof protocol"
	    ;;
    esac
}

# Retrieve an artifact from a repo by checksum.
#
# $1: source URL ("-" for the stdio protocol, or file://<dir>)
# $2: artifact checksum ("sha256-<digest>")
# $3: output path, or "-" to stream to stdout
pof_get() {
    local protocol
    local repo
    parse_url "${1}"; shift
    local sum="${1}"; shift
    local output="${1}"; shift
    local file="${repo}/${sum}"

    case "${protocol}" in
	file)
	    # quote paths so filenames with spaces survive
	    if [ "${output}" = "-" ]; then
		cat "${file}"
	    else
		cp "${file}" "${output}"
	    fi
	    ;;
	stdio)
	    receive_file "${output}" "${sum}"
	    ;;
	*)
	    shtk_cli_error "${protocol} is not a valid pof protocol"
	    ;;
    esac
}

# Serve one request read from stdin against the repo in $1.
#
#   "P <sum> <size>"  store the <size> raw bytes that follow (verified
#                     against the trailing "Z <sum>" line); reply with sum
#   "G <sum>"         stream the named artifact back in A/<bytes>/Z framing
pof_daemon() {
    local repo="${1}"; shift
    local infiles="${repo}/.infiles"
    mkdir -p "${infiles}"
    # stage uploads inside the repo so the final mv is atomic
    local infile=$(mktemp -p "${infiles}" -t pof.infile)
    local line
    read line
    local control=$(echo "${line}" | cut -w -f 1)
    case "${control}" in
	P)
	    local begsum=$(echo "${line}" | cut -w -f 2)
	    local size=$(echo "${line}" | cut -w -f 3)
	    dd of="${infile}" bs="${size}" count=1 2>/dev/null
	    read line
	    local control=$(echo "${line}" | cut -w -f 1)
	    local endsum=$(echo "${line}" | cut -w -f 2)
	    if [ "${control}" != "Z" ]; then
		# closing quote was missing from this message
		shtk_cli_error "last line must be 'Z <sum>'"
		exit 1
	    fi
	    if [ "${begsum}" != "${endsum}" ]; then
		shtk_cli_error "start sum ${begsum} does not match end sum ${endsum}"
		exit 1
	    fi

	    local sum=sha256-$(sha256sum -q "${infile}")
	    if [ "${sum}" != "${endsum}" ]; then
		shtk_cli_error "received sum ${endsum} does not match calculated sum ${sum}"
		exit 1
	    fi
	    mv "${infile}" "${repo}/${sum}"
	    echo "${sum}"
	    ;;
	G)
	    local requested_sum=$(echo "${line}" | cut -w -f 2)
	    local file="${repo}/${requested_sum}"
	    if [ ! -f "${file}" ]; then
		shtk_cli_error "Cannot find file ${file}"
		exit 1
	    fi
	    # re-hash before serving so repo corruption is caught here
	    local sum=sha256-$(sha256sum -q "${file}")
	    if [ "${requested_sum}" != "${sum}" ]; then
		# was ${requested}, an unset variable: the message printed
		# an empty requested sum
		shtk_cli_error "requested sum ${requested_sum} does not match calculated sum ${sum}"
		exit 1
	    fi
	    local size=$(stat -f %z "${file}")
	    echo "A ${sum} ${size}"
	    cat "${file}"
	    echo "Z ${sum}"
	    ;;
	*)
	    shtk_cli_error "unknown control character ${control}"
	    exit 1
	    ;;
    esac
}

# Frame a file for the stdio protocol on stdout:
# "P <sum> <size>", then the raw bytes, then "Z <sum>".
transmit_file() {
    local file="${1}"; shift
    local sum="${1}"; shift
    # stat -f %z is the BSD spelling for file size in bytes
    local size=$(stat -f %z "${file}")
    echo "P ${sum} ${size}"
    cat "${file}"
    echo "Z ${sum}"
}

# Request artifact $2 over stdin/stdout and write it to $1.
# With "-" the bytes stream straight to stdout (or stderr when
# log_to_stderr=1) and checksum verification is skipped.
receive_file() {
    local outfile="${1}"; shift
    local infile="${outfile}.infile"
    local sum="${1}"; shift
    echo "G ${sum}"

    local line
    read line
    local control=$(echo "${line}" | cut -w -f 1)
    local begsum=$(echo "${line}" | cut -w -f 2)
    local size=$(echo "${line}" | cut -w -f 3)
    if [ "${control}" != "A" ]; then
	shtk_cli_error "first line must be 'A <sum> <size>'"
    fi
    if [ "${outfile}" = "-" ]; then
	if [ "${log_to_stderr}" = "1" ]; then
	    dd bs="${size}" count=1 2>/dev/null | tee 1>&2
	else
	    dd bs="${size}" count=1 2>/dev/null
	fi
    else
	dd of="${infile}" bs="${size}" count=1 2>/dev/null
    fi
    read line
    # Bytes already streamed to stdout cannot be checksummed after the fact.
    if [ "${outfile}" = "-" ]; then
	exit 0
    fi
    local control=$(echo "${line}" | cut -w -f 1)
    local endsum=$(echo "${line}" | cut -w -f 2)
    if [ "${control}" != "Z" ]; then
	# closing quote was missing from this message
	shtk_cli_error "last line must be 'Z <sum>'"
	exit 1
    fi
    if [ "${begsum}" != "${endsum}" ]; then
	shtk_cli_error "start sum ${begsum} does not match end sum ${endsum}"
	exit 1
    fi

    local sum=sha256-$(sha256sum -q "${infile}")
    if [ "${sum}" != "${endsum}" ]; then
	shtk_cli_error "received sum ${endsum} does not match calculated sum ${sum}"
	exit 1
    fi
    mv "${infile}" "${outfile}"
}

# Split a pof URL into the variables `protocol` and `repo`
# (callers declare both local before calling):
#
#   "-"                 -> protocol=stdio, repo="-"
#   "<proto>://<path>"  -> protocol=<proto>, repo=<path>
parse_url() {
    local url="${1}"; shift
    if [ "${url}" = "-" ]; then
	protocol=stdio
	repo="-"
    else
	# Pure parameter expansion instead of three echo|grep|sed
	# subshells per call.  A URL with no "://" now yields
	# protocol=<url> (rather than ""), which gives the caller's
	# "not a valid pof protocol" error a useful message.
	protocol="${url%%://*}"
	repo="${url#*://}"
    fi
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































































































































































































































































































































































































Deleted share/experiments/pof/test/pof_test.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
shtk_import unittest

shtk_unittest_add_test put_get
# Round-trip through the file:// protocol: put a file, get it back by the
# checksum that put printed, then stream it to stdout with "-".
put_get_test() {
    pofdir=$(mktemp -d -t pof.test)
    file1=$pofdir/file1
    file2=$pofdir/file2
    # put's stdout (the checksum) is saved here for the later gets
    sum=$pofdir/sum

    echo "hello pof" > $file1
    assert_command -o not-empty -o save:$sum pof put file://$pofdir $file1

    assert_command pof get file://$pofdir $(cat $sum) $file2
    assert_file file:$file1 $file2

    assert_command -o inline:"hello pof\n" pof get file://$pofdir $(cat $sum) -
}

shtk_unittest_add_test stdio_client_server
# Same round trip over the stdio protocol: socat wires the client's
# stdin/stdout to a "pof daemon" process.  -e makes the client log to
# stderr, which is what assert_command's -e checks capture.
stdio_client_server_test() {
    pofdir=$(mktemp -d -t pof.test)
    file1=$pofdir/file1
    file2=$pofdir/file2
    sum=$pofdir/sum

    echo "hello pof" > $file1
    assert_command -e save:$sum socat exec:"pof -e put - $file1" exec:"pof daemon $pofdir"

    assert_command socat exec:"pof get - $(cat $sum) $file2" exec:"pof daemon $pofdir"
    assert_file file:$file1 $file2

    assert_command -e inline:"hello pof\n" socat exec:"pof -e get - $(cat $sum) -" exec:"pof daemon $pofdir"
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































































Deleted share/experiments/vale/README.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# vale experiments

Notes:

- This requires the `nm_test` jail to have a devfs ruleset that shows `netmap` device.
- Set up a bridge with one of the `bridge_*.sh` scripts, and then run tests.
- It's possible that test results differ across machines.

## bridges

- `bridge_epair.sh` - crashes. creates epair devices on each host, bridges (?) them using `valectl`.
  I think `valectl` bridges them, but I'm not sure.

## tests

- `ping 192.168.6.2`
- `jexec nm_test ping 192.168.6.1`
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































Deleted share/experiments/vale/bridge_epair.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
#!/bin/sh
# Bridge the host and the nm_test jail over a VALE (netmap) switch using
# epair interfaces.  NOTE(review): the README says this script crashes,
# and the author was unsure valectl actually bridges the epairs — treat
# as an experiment, not a working setup.
set -e

# host side: epair10a gets the address, epair10b is attached to vale1
ifconfig epair10 create inet 192.168.6.1/24 up
ifconfig epair10b up
valectl -h vale1:epair10b

service jail onestart nm_test
j="jexec nm_test"
# jail side: mirror of the host setup on epair11
$j ifconfig epair11 create inet 192.168.6.2/24 up
$j ifconfig epair11b up
$j valectl -h vale1:epair11b

# show the resulting VALE port list
valectl
<
<
<
<
<
<
<
<
<
<
<
<
<
<




























Deleted share/experiments/valgrind-test/Makefile.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
# Build "hello world" in several languages and run each binary under
# valgrind to compare heap behavior (hello-leak.c is a deliberate leak).
# Override IMPL on the command line to build a subset.
.PHONY: all clean valgrind impls
IMPL?=	c \
	c-heap \
	c-leak \
	crystal \
	go \
	rust \
	d

# _build/hello-<impl> for every implementation above
PROGS=	${IMPL:S/^/_build\/hello-/g}
CFLAGS+=	-Wall -Wextra -Werror

all: ${PROGS}

clean:
	rm -rf _build

impls:
	@echo ${IMPL} | tr ' ' '\n'

valgrind: all
	for p in ${PROGS}; do valgrind --leak-check=full ./$$p; done

_build/hello-c: hello.c
	@mkdir -p ${.TARGET:H}
	cc ${CFLAGS} -o ${.TARGET} ${.ALLSRC}

_build/hello-c-heap: hello-heap.c
	@mkdir -p ${.TARGET:H}
	cc ${CFLAGS} -o ${.TARGET} ${.ALLSRC}

_build/hello-c-leak: hello-leak.c
	@mkdir -p ${.TARGET:H}
	cc ${CFLAGS} -o ${.TARGET} ${.ALLSRC}

_build/hello-crystal: hello.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} ${.ALLSRC}

_build/hello-go: hello.go
	@mkdir -p ${.TARGET:H}
	go build -o ${.TARGET} ${.ALLSRC}

_build/hello-rust: hello.rs
	@mkdir -p ${.TARGET:H}
	rustc -o ${.TARGET} ${.ALLSRC}

_build/hello-d: hello.d
	@mkdir -p ${.TARGET:H}
	ldc2 -of ${.TARGET} ${.ALLSRC}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































































































Deleted share/experiments/valgrind-test/hello-heap.c.

1
2
3
4
5
6
7
8
9
10
#include <stdio.h>
#include <string.h>
#include <stdlib.h>

/* Heap case for the valgrind comparison: duplicate a string literal,
 * print it, and free it — valgrind should report no leaks. */
int main() {
  const char *greeting = "hello world\n";
  char *copy = strdup(greeting);
  printf("%s", copy);
  free(copy);
  return 0;
}
<
<
<
<
<
<
<
<
<
<




















Deleted share/experiments/valgrind-test/hello-leak.c.

1
2
3
4
5
6
7
8
9
#include <stdio.h>
#include <string.h>
#include <stdlib.h>

/* Deliberate-leak fixture for the valgrind Makefile target: the strdup'd
 * buffer is intentionally never freed so that valgrind --leak-check=full
 * has something to report.  Do not "fix" this. */
int main() {
  char *hello = "hello world\n";
  char *str = strdup(hello);
  printf("%s", str);
}
<
<
<
<
<
<
<
<
<


















Deleted share/experiments/valgrind-test/hello.c.

1
2
3
4
5
#include <stdio.h>

/* No-heap baseline for the valgrind comparison. */
int main() {
  puts("hello world");
  return 0;
}
<
<
<
<
<










Deleted share/experiments/valgrind-test/hello.cr.

1
# Crystal entry in the valgrind hello-world comparison.
puts "hello world"
<


Deleted share/experiments/valgrind-test/hello.d.

1
2
3
4
5
6
import std.stdio;

// D entry in the valgrind hello-world comparison (built with ldc2).
void main()
{
  writeln("hello world");
}
<
<
<
<
<
<












Deleted share/experiments/valgrind-test/hello.go.

1
2
3
4
5
6
7
package main

import "fmt"

// Go entry in the valgrind hello-world comparison.
func main() {
	fmt.Println("hello world")
}
<
<
<
<
<
<
<














Deleted share/experiments/valgrind-test/hello.rs.

1
2
3
// Rust entry in the valgrind hello-world comparison.
fn main() {
    println!("hello world");
}
<
<
<






Deleted share/notes/add-freebsd-package-categories.md.

1
2
3
4
5
6
7
# Add FreeBSD package categories

In make.conf:

`VALID_CATEGORIES+= my_category`

- `/usr/local/etc/poudriere.d/make.conf` (or more specific conf files)
<
<
<
<
<
<
<














Deleted share/notes/disable-ssh-agent-xfce4.md.

1
2
3
4
5
# Disable ssh-agent in xfce4

`xfconf-query -c xfce4-session -p /startup/ssh-agent/enabled -n -t bool -s false`

from https://notebook.niklaas.eu/xfce-disable-ssh-agent/
<
<
<
<
<










Deleted share/notes/freebsd-swap-caps-lock-and-control.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
# Swap caps lock and control

Per-session: `setxkbmap -option ctrl:nocaps`

Permanent:

```
Section "InputClass"
	Identifier "Map caps lock to ctrl"
        MatchIsKeyboard "on"
        Option "XkbOptions" "ctrl:nocaps"
EndSection
```
<
<
<
<
<
<
<
<
<
<
<
<
<


























Deleted share/notes/freebsd-zfs-on-root-gcp-aws-poudriere.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
# FreeBSD ZFS-on-root on GCP and AWS with Poudriere

## GCP

- Make a disk.raw file, then `tar --format=gnutar -zcf myimage.tar.gz disk.raw` (from src/release GCE tool)
- Upload it to GCP storage
- `gcloud compute images create myimage --source-uri=gs://mybucket/myimage.tar.gz --guest-os-features=UEFI_COMPATIBLE`

## AWS

- Upload image to S3
- Create a snapshot: `aws ec2 import-snapshot --disk-container Format=raw,Url=s3://mybucket/myimage.img`
- Make an AMI (use UEFI boot)
  I used console, need to figure out command line
<
<
<
<
<
<
<
<
<
<
<
<
<
<




























Deleted share/notes/xfce4-suspend-resume-menu-lock-display.md.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# xfce4 suspend / resume menu - lock display on suspend

xfce4 settings -> session and startup -> enable "lock screen before sleep"

need to use the xfce4 menu suspend rather than `doas zzz`

https://forums.freebsd.org/threads/xfce-logout-menu-lack-of-suspend-hibernate-menu.74771/

## Activate stop / suspend options

```
polkit.addRule(function(action, subject) {
  if((action.id == "org.freedesktop.consolekit.system.restart" || action.id == "org.freedesktop.consolekit.system.stop" || action.id == "org.freedesktop.consolekit.system.suspend") && subject.isInGroup("operator")) {
    return polkit.Result.YES;
  }
});
```
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































Deleted share/pubkeys.

1
2
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM5W1fmVtiqaU1b/ghN9WmQv4fUgaVu6nZp56Cihhp3P patmaddox@beastie
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJsP0Iutc3/5X4vu6DQl26aWAgKdKJwz7fQ1waTW+iP7 patmaddox@gulliver
<
<




Deleted share/scripts/phab-delflags.sh.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
#!/bin/sh
# recursively delete all of my flags on phabricator
# Requires TOKEN in the environment (Conduit API token).
token=${TOKEN:?}

# Query the current page of flag IDs, delete each, then recurse until the
# query comes back empty (flag.query only returns a page at a time).
delete_flags() {
    echo "about to delete some flags"
    fids=$(curl -s https://reviews.freebsd.org/api/flag.query -d api.token="$token" | fx 'x.result.map(o => o.id).join(" ")')

    if [ -n "$fids" ]; then
	for fid in $fids; do
	    # SECURITY: the previous version hard-coded a live API token
	    # here instead of using $token, leaking a credential in the
	    # repo (and ignoring $TOKEN).  If that token was ever pushed,
	    # revoke it in Phabricator.
	    curl -s https://reviews.freebsd.org/api/flag.delete -d api.token="$token" -d id="$fid" > /dev/null
	    echo "deleted $fid"
	done
	delete_flags
    fi
}

delete_flags
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































Deleted src/LICENSE.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
BSD 2-Clause License

Copyright (c) 2022, Pat Maddox

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































Added src/backup/TODO.md.











>
>
>
>
>
1
2
3
4
5
# TODO

- [x] remote backup
- [ ] local backup
- [ ] backup config file (basically just sets destination)

Added src/backup/backup.sh.







































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
#!/bin/sh
# Back up system configuration (/boot/loader.conf, /etc, /usr/local/etc)
# to /mnt/backups/<hostname> on the "nas" host (locally when run on nas).
# Refuses to run unless /etc/hostid matches the hostid recorded on the
# backup target, so one machine cannot clobber another's backup.
set -e
hostname=$(hostname)
lhostid=$(cat /etc/hostid)
rhostid=""
gethostid="cat /mnt/backups/$hostname/hostid"
backuptarget="/mnt/backups/$hostname"
user=$(whoami)
if [ -z "$lhostid" ]; then
    # The old form `>&2 "error: ..."` tried to EXECUTE the message string
    # as a command; echo the diagnostic to stderr instead.
    echo "error: /etc/hostid is empty" >&2
    exit 1
fi
if [ "$hostname" = "nas" ]; then
    rhostid=$($gethostid)
else
    rhostid=$(ssh nas "$gethostid")
fi
if [ -z "$rhostid" ]; then
    echo "error: remote hostid is empty" >&2
    exit 1
fi
if [ "$lhostid" != "$rhostid" ]; then
    echo "error: hostids do not match (local: $lhostid remote: $rhostid)" >&2
    exit 1
fi

# remote targets go through rsync-over-ssh
if [ "$hostname" != "nas" ]; then
    backuptarget="$user@nas:$backuptarget"
fi

# prefer doas(1) when installed, fall back to sudo
doas="doas"
if [ ! -f /usr/local/bin/doas ]; then
    doas="sudo"
fi
# -R preserves the full source paths on the target
$doas rsync -aR /boot/loader.conf /etc /usr/local/etc "$backuptarget"

Added src/c/Makefile.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
# Small C experiments built with the BSD progs framework.
CFLAGS= -Wall -Werror # = instead of += because -O2 results in term-loop not incrementing

PROGS=
MAN=

# prints malloc_usable_size vs strlen for two strings
PROGS+= pointer-size
SRCS.pointer-size= pointer_size.c

# busy loop that reports its counter on SIGINFO
PROGS+= term-loop
SRCS.term-loop= term_loop.c

.include <bsd.progs.mk>

Added src/c/pointer_size.c.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#include <stdio.h>
#include <malloc.h>
#include <string.h>
#include <stdlib.h>

/*
 * Compare malloc's usable allocation size against strlen for a short and
 * a longer string.  NOTE(review): <malloc.h> / malloc_usable_size are the
 * glibc spelling; on FreeBSD this lives in <malloc_np.h> — confirm the
 * intended platform.
 */
int main() {
  char *s1 = strdup("hello");
  size_t size1 = malloc_usable_size(s1);
  size_t len1 = strlen(s1);
  /* %zu is the correct conversion for size_t; %lu was only right where
   * unsigned long happens to match size_t */
  printf("s1\n\tsize: %zu\n\tlen: %zu\n", size1, len1);

  char *s2 = strdup("hello world are you there");
  size_t size2 = malloc_usable_size(s2);
  size_t len2 = strlen(s2);
  printf("s2\n\tsize: %zu\n\tlen: %zu\n", size2, len2);

  /* free the duplicates so the program is clean under leak checkers */
  free(s2);
  free(s1);
  return 0;
}

Added src/c/term_loop.c.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
#include <stdio.h>
#include <signal.h>

/*
 * Busy loop that increments a counter forever; send SIGINFO (^T on
 * FreeBSD terminals) to print the current count.
 */

static void report(int sig);

/*
 * volatile sig_atomic_t: without volatile the optimizer may keep the
 * counter in a register — the Makefile notes that -O2 made the count
 * stop incrementing — and sig_atomic_t guarantees the handler reads a
 * coherent value.
 */
static volatile sig_atomic_t count = 0;

int main() {
  signal(SIGINFO, report);

  while (1) {
    ++count;
  }

  return 0;
}

/* Signal handlers take the signal number; the old `void report()` empty
 * parameter list did not match the handler type signal(3) expects.
 * NOTE(review): printf is not async-signal-safe — fine for a toy, but
 * write(2) would be strictly correct. */
static void report(int sig) {
  (void)sig;
  printf("%d\n", (int)count);
}

Added src/elixir-benchmarks/.gitignore.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
*.a
*.o
*.pico
*.po
*.so
*.so.debug
*.so.full
.depend.nif*

Added src/elixir-benchmarks/BSDmakefile.













>
>
>
>
>
>
1
2
3
4
5
6
# Build the uuid NIF (nif_uuid_gen.c) as nif_uuid_gen.so using the BSD
# shared-library framework; uuid.exs loads it from the current directory.
LIB= nif_uuid_gen
SRCS= nif_uuid_gen.c
SHLIB_NAME= ${LIB}.so
MK_MAN= no

.include <bsd.lib.mk>

Added src/elixir-benchmarks/nif_uuid_gen.c.

























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
#include <stdio.h>
#include <stdlib.h>
#include <sys/uuid.h>
#include <uuid.h>
#include "/usr/local/lib/erlang25/usr/include/erl_nif.h"

/* NIF: generate a random UUID and return it as an Erlang latin-1 string
 * (raises on any libuuid failure). */
static ERL_NIF_TERM uuid_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) {
  char *uuid_str;
  uuid_t uuid;
  uint32_t status;

  uuid_create(&uuid, &status);
  if (status != uuid_s_ok) {
    return enif_raise_exception(env, enif_make_string(env, "error creating uuid", ERL_NIF_LATIN1));
  }

  uuid_to_string(&uuid, &uuid_str, &status);
  if (status != uuid_s_ok) {
    return enif_raise_exception(env, enif_make_string(env, "error converting uuid to string", ERL_NIF_LATIN1));
  }

  /* uuid_to_string(3) allocates the string with malloc; the old code
   * leaked it on every call — copy it into the NIF term, then free it. */
  ERL_NIF_TERM result = enif_make_string(env, uuid_str, ERL_NIF_LATIN1);
  free(uuid_str);
  return result;
}

static ErlNifFunc nif_funcs[] = {
  {"uuid", 0, uuid_nif}
};

ERL_NIF_INIT(Elixir.UuidNif, nif_funcs, NULL, NULL, NULL, NULL)

Added src/elixir-benchmarks/uuid.exs.







































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
# Benchmark UUID-generation strategies: the faker package vs a custom C
# NIF (nif_uuid_gen.c; build it first with the BSDmakefile here).
Mix.install([
  {:faker, "~> 0.17.0"},
  {:uuid, "~> 2.0", hex: :uuid_erl},
])

require Logger

# Time 2,000,000 calls of func and log elapsed wall-clock seconds.
benchmark = fn name, func ->
  Logger.debug("BEG #{name}")
  {time, _} = :timer.tc(fn ->
    Enum.each(1..2_000_000, fn _ -> func.() end)
  end)
  seconds = time / 1_000_000
  Logger.debug("END #{name} (#{seconds} seconds)")
end

# uuid_erl - disabled because string version is slow
#benchmark.(":uuid.get_v4_random()", fn -> :uuid.get_v4_urandom() end)
#benchmark.(":uuid.get_v4_random() to string", fn -> :uuid.get_v4_urandom() |> :uuid.uuid_to_string() end)

# faker
benchmark.("faker", fn -> Faker.UUID.v4() end)

defmodule UuidNif do
  @on_load :init

  # Load ./nif_uuid_gen.so relative to the working directory.
  def init do
    :ok = :erlang.load_nif("./nif_uuid_gen", 0)
  end

  # Replaced by the NIF on load; reaching this body means loading failed.
  def uuid, do: exit(:nif_not_loaded)
end

# NIF
benchmark.("nif", &UuidNif.uuid/0)

Added src/fake-rootkit/Makefile.































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
# Demo workflow: setup installs trusted tools, spec records a baseline,
# rootkit swaps in the compromised tools, verify (using the host's own
# PATH) is fooled, verify-offline (using the real tools) is not.
.PHONY: setup spec verify rootkit verify-offline clean

SPEC= spec.mtree

setup:
	mkdir -p host/bin
	cp bin-good/mtree host/bin/
	cp bin-good/md5sum host/bin/

spec: ${SPEC}
${SPEC}:
	PATH=./host/bin:$$PATH mtree -c -p host > ${.TARGET}

# verification that trusts the (possibly compromised) host binaries
verify: ${SPEC}
	PATH=./host/bin:$$PATH mtree -p host < ${SPEC}
	PATH=./host/bin:$$PATH md5sum host/bin/mtree
	PATH=./host/bin:$$PATH md5sum host/bin/md5sum

# verification with trusted off-host tools — catches the swap
verify-offline:
	mtree -p host < ${SPEC} || echo "mtree does not match!"
	md5sum host/bin/mtree
	md5sum host/bin/md5sum

# stash the originals (bin-bad/md5sum reports their checksums) and
# install the impostors
rootkit:
	cp host/bin/mtree host/bin/orig-mtree
	cp host/bin/md5sum host/bin/orig-md5sum
	cp bin-bad/mtree host/bin/
	cp bin-bad/md5sum host/bin/

clean:
	rm -rf host/bin spec.mtree

Added src/fake-rootkit/README.md.















































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
# a fake rootkit to thwart mtree and md5sum verification

*this is not a real rootkit* (that's what they all say)

[The FreeBSD Handbook suggests using `mtree(8)` to verify system files and binaries.](https://docs.freebsd.org/en/books/handbook/security/#security-ids)

**What if mtree itself is compromised?**

This fake rootkit simulates this scenario, by replacing two binaries used for verification - `mtree(8)` and `md5sum(1)` - with compromised binaries that report misleading results:

- `bin-bad/mtree` - no-op, so the verification appears to succeed even though the tree signature has changed
- `bin-bad/md5sum` - calls original `md5sum`, passing paths to original `md5sum` and `mtree` if necessary, to disguise the fact that these have been compromised

Obviously it would not be hard to expose this naive "rootkit."
It simply illustrates how to anticipate and mislead certain verification techniques.

*Can a compromised system be trusted to report its file signatures?
As of now, I don't think so.
**A separate trusted system needs to verify the data at rest.***

## Example

```
$ make setup
mkdir -p host/bin
cp bin-good/mtree host/bin/
cp bin-good/md5sum host/bin/

$ make spec
PATH=./host/bin:$PATH mtree -c -p host > spec.mtree

$ make verify
PATH=./host/bin:$PATH mtree -p host < spec.mtree
PATH=./host/bin:$PATH md5sum host/bin/mtree
efa74a99c24a881ca30f438d8ee79441  host/bin/mtree
PATH=./host/bin:$PATH md5sum host/bin/md5sum
b13d64791e86f1381590d9cf87be8c41  host/bin/md5sum

$ make rootkit
cp host/bin/mtree host/bin/orig-mtree
cp host/bin/md5sum host/bin/orig-md5sum
cp bin-bad/mtree host/bin/
cp bin-bad/md5sum host/bin/

$ make verify
PATH=./host/bin:$PATH mtree -p host < spec.mtree
I am root mtree
PATH=./host/bin:$PATH md5sum host/bin/mtree
I am root md5sum
efa74a99c24a881ca30f438d8ee79441  host/bin/mtree
PATH=./host/bin:$PATH md5sum host/bin/md5sum
I am root md5sum
b13d64791e86f1381590d9cf87be8c41  host/bin/md5sum

$ make verify-offline
mtree -p host < spec.mtree || echo "mtree does not match!"
bin:    modification time (Fri Nov 10 01:15:11 2023, Fri Nov 10 01:15:18 2023)
extra: bin/orig-md5sum
bin/md5sum: 
	size (26, 241)
	modification time (Fri Nov 10 01:15:11 2023, Fri Nov 10 01:15:18 2023)
extra: bin/orig-mtree
bin/mtree: 
	size (29, 74)
	modification time (Fri Nov 10 01:15:11 2023, Fri Nov 10 01:15:18 2023)
mtree does not match!
md5sum host/bin/mtree
9512f5d8d9d8adf507dc76cffbe974e9  host/bin/mtree
md5sum host/bin/md5sum
a4d4589f492eef94086e329738b27107  host/bin/md5sum
```

Added src/fake-rootkit/bin-bad/md5sum.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
#!/bin/sh
# Impostor md5sum for the fake-rootkit demo: when asked about mtree or
# md5sum it hashes the stashed ORIGINAL binary (orig-*) and rewrites the
# path in the output, so the swapped-in binaries appear unmodified.
sum=/sbin/md5sum

echo "I am root md5sum"

if echo "$1" | grep -q mtree; then
    $sum host/bin/orig-mtree | sed 's/orig-//'
elif echo "$1" | grep -q md5sum; then
    $sum host/bin/orig-md5sum | sed 's/orig-//'
else
    # "$@" keeps each argument intact; $* broke paths containing spaces
    $sum "$@"
fi

Added src/fake-rootkit/bin-bad/mtree.











>
>
>
>
>
1
2
3
4
5
#!/bin/sh

# Impostor mtree for the fake-rootkit demo: performs no verification at
# all, so "make verify" appears to succeed after the swap.

printf '%s\n' "I am root mtree"

Added src/fake-rootkit/bin-good/md5sum.





>
>
1
2
#!/bin/sh
# Trusted wrapper around the system md5sum.  "$@" (not $*) keeps
# arguments containing spaces intact.
/sbin/md5sum "$@"

Added src/fake-rootkit/bin-good/mtree.





>
>
1
2
#!/bin/sh
# Trusted wrapper around the system mtree.  "$@" (not $*) keeps
# arguments containing spaces intact.
/usr/sbin/mtree "$@"

Added src/fake-rootkit/host/data/bar.txt.



>
1
this is bar

Added src/fake-rootkit/host/data/foo.txt.



>
1
this is foo

Added src/ffi-adventure/README.md.









































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
# FFI Adventure

Explorations in calling C libraries from other languages.

## Languages for consideration

- C (reference)
- D
- Elixir
- Go
- Lua - `/usr/libexec/flua`
- Nim
- Pony
- Rust
- V
- Zig

## Languages to investigate

I know less about these, and may have to port some of them to FreeBSD.

- C++
- Common Lisp
- Crystal
- Haxe
- Jai
- Nit
- OCaml
- Odin
- Vale

## Examples

- hello world (pass a string to C)
- upcase (modify string in calling language)
- intentional memory leak (how easy is it?)
- concurrency (data races?)
- various other memory safety issues

## Questions

- Do any languages let you restrict access to a C library? e.g. for [a library that's not thread-safe](c093c9c4c), ensure that only one thread can access it at a time? 

## Notes

- Go was a bit easier to get going than Rust - just inline CFLAGS and LDFLAGS in the code.
  Rust setup took more time, appears to be more configurable.
- Rust provided compiler errors, e.g. CString is not FFI-safe (it needs to be turned into a pointer)
- Rust appears to automatically deallocate the memory, whereas Go needs to call C.free
- Pony was easy to get going.
- Go can use the library's header file; Rust and Pony appear to have their own definitions
- Mutation seems to require copying memory (especially for strings, because they're null-terminated)

Added src/ffi-adventure/concurrency/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
# Fan out to every concurrency-<lang> subdirectory for build/run/clean.
all:
	for m in concurrency-*/Makefile; do ${MAKE} -C $$(dirname $$m); done

run:
	for m in concurrency-*/Makefile; do echo "=== $$(dirname $$m):"; ${MAKE} -C $$(dirname $$m) run; done

clean:
	for m in concurrency-*/Makefile; do ${MAKE} -C $$(dirname $$m) clean; done

Added src/ffi-adventure/concurrency/concurrency-c/Makefile.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
# Build libbadbank (also linked by the other language demos) and the C
# driver that races two threads against it.
_build/badbank: badbank.c _build/libbadbank.a
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror -L_build -lbadbank -lpthread ${.ALLSRC:[1]}

_build/libbadbank.a: libbadbank.c
	@mkdir -p ${.TARGET:H}
	cc -fPIC -c -o ${.TARGET} -Wall -Werror ${.ALLSRC}

run: _build/badbank
	./_build/badbank

clean:
	rm -rf _build

Added src/ffi-adventure/concurrency/concurrency-c/badbank.c.

















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
#include <stdio.h>
#include <pthread.h>
#include "libbadbank.h"

/* Deposit $100 a million times from one thread. */
static void *_deposit() {
  for(int i = 0; i < 1000000; i++) {
    deposit(100);
  }
  return NULL;
}

/* Run two depositors concurrently against libbadbank's unsynchronized
 * counter.  The printed balance will typically be less than the
 * $200,000,000 total because updates are lost to the data race — that
 * race is the point of the "bad bank" demo. */
int main() {
  pthread_t t1;
  pthread_create(&t1, NULL, _deposit, NULL);

  pthread_t t2;
  pthread_create(&t2, NULL, _deposit, NULL);

  pthread_join(t1, NULL);
  pthread_join(t2, NULL);

  printf("Balance: $%i\n", balance());
  return 0;
}

Added src/ffi-adventure/concurrency/concurrency-c/libbadbank.c.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
#include <stdio.h>
#include "libbadbank.h"

/* Intentionally thread-UNSAFE account balance: deposit's read-modify-
 * write has no locking or atomics, so concurrent callers lose updates.
 * The FFI demos link against this to observe the race — do not add
 * synchronization. */
static int _balance = 0;

/* Add amount to the balance (racy by design). */
void deposit(int amount) {
  _balance += amount;
}

/* Return the current balance. */
int balance() {
  return _balance;
}

Added src/ffi-adventure/concurrency/concurrency-c/libbadbank.h.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
#ifndef __LIBBADBANK
#define __LIBBADBANK

/* Deliberately thread-unsafe toy bank; see libbadbank.c. */
void deposit(int);
int balance();

#endif

Added src/ffi-adventure/concurrency/concurrency-go/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
# Go driver for libbadbank (link flags live in badbank.go's cgo preamble;
# build ../concurrency-c first).
_build/badbank: badbank.go
	go build -o ${.TARGET} ${.ALLSRC}

run: _build/badbank
	./_build/badbank

clean:
	rm -rf _build

Added src/ffi-adventure/concurrency/concurrency-go/badbank.go.



































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
package main

/*
#cgo CFLAGS: -I../concurrency-c
#cgo LDFLAGS: -L../concurrency-c/_build -lbadbank
#include <stdlib.h>
#include "libbadbank.h"
*/
import "C"
import "fmt"

// Race two goroutines against libbadbank's unsynchronized counter and
// print the (typically short) final balance — the lost updates are the
// point of the demo.
func main() {
     defer printBalance()
     done1 := deposit()
     done2 := deposit()
     <-done1
     <-done2
}

// printBalance reads the final balance from the C library.
func printBalance() {
  fmt.Printf("Balance: $%v\n", C.balance())
}

// deposit starts a goroutine that makes a million $100 deposits and
// returns a channel that closes the fan-in when it finishes.
func deposit() <-chan struct{} {
  done := make(chan struct{})
  go func() {
    for i := 0; i < 1000000; i++ {
      C.deposit(100)
    }
    done <- struct{}{}
  }()
  return done
}

Added src/ffi-adventure/concurrency/concurrency-pony/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
# Pony driver for libbadbank (library path is declared in badbank.pony;
# build ../concurrency-c first).
_build/badbank: badbank.pony
	ponyc -o ${.TARGET:H} -b ${.TARGET:T} .

run: _build/badbank
	./_build/badbank

clean:
	rm -rf _build

Added src/ffi-adventure/concurrency/concurrency-pony/badbank.pony.





































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
use "collections"
use "path:../concurrency-c/_build"
use "lib:badbank"
use @deposit[None](amount: U32)
use @balance[U32]()

// Launch two Depositor actors against libbadbank's unsynchronized C
// counter; print the balance once both report done.
actor Main
  var _env: Env
  var _count: U32 = 0

  new create(env: Env) =>
    _env = env
    start_depositor()
    start_depositor()

  be start_depositor() =>
    let d: Depositor = Depositor.create()
    d.deposit(this)
    _count = _count + 1

  // Called back by each Depositor; the last one triggers the printout.
  be done() =>
    _count = _count - 1
    if _count == 0 then
      _env.out.print("Balance: $" + @balance().string())
    end

// Makes a million $100 FFI deposits, then notifies Main.
actor Depositor
  be deposit(main: Main) =>
    var count: U32 = 0
    while count < 1000000 do
      @deposit(100)
      count = count + 1
    end
    main.done()

Added src/ffi-adventure/hello-world/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
# Fan out to every hello-world-<lang> subdirectory for build/run/clean.
all:
	for m in hello-world-*/Makefile; do ${MAKE} -C $$(dirname $$m); done

run:
	for m in hello-world-*/Makefile; do echo "=== $$(dirname $$m):"; ${MAKE} -C $$(dirname $$m) run; done

clean:
	for m in hello-world-*/Makefile; do ${MAKE} -C $$(dirname $$m) clean; done

Added src/ffi-adventure/hello-world/hello-world-c/Makefile.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
# Build libhello (linked by every other language's FFI demo) and the C
# reference caller.
_build/hello: hello.c _build/libhello.a
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror -L_build -lhello ${.ALLSRC:[1]}

_build/libhello.a: libhello.c
	@mkdir -p ${.TARGET:H}
	cc -fPIC -c -o ${.TARGET} -Wall -Werror ${.ALLSRC}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build

Added src/ffi-adventure/hello-world/hello-world-c/hello.c.













>
>
>
>
>
>
1
2
3
4
5
6
#include "libhello.h"

/* Demo driver: greet from plain C via the libhello static library. */
int main() {
  hello("C");
  return 0;
}

Added src/ffi-adventure/hello-world/hello-world-c/libhello.c.





















>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
#include <stdio.h>
#include "libhello.h"

/* Write a greeting for `name` to stdout ("Hello, <name>!\n"). */
void hello(const char *name) {
  fprintf(stdout, "Hello, %s!\n", name);
}

/* Convenience wrapper: greet the canonical recipient. */
void helloWorld() {
  hello("World");
}

Added src/ffi-adventure/hello-world/hello-world-c/libhello.h.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
#ifndef __LIBHELLO
#define __LIBHELLO

void hello(const char *name);
void helloWorld();

#endif

Added src/ffi-adventure/hello-world/hello-world-cpp/Makefile.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
_build/hello: hello.cpp ../hello-world-c/_build/libhello.a
	@mkdir -p ${.TARGET:H}
	c++ -o ${.TARGET} -Wall -Werror -I../hello-world-c -L../hello-world-c/_build -lhello ${.ALLSRC:[1]}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build

Added src/ffi-adventure/hello-world/hello-world-cpp/hello.cpp.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
extern "C"
{
#include "libhello.h"
}

int main() {
  hello("C++");
  return 0;
}

Added src/ffi-adventure/hello-world/hello-world-crystal/Makefile.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
_build/hello: hello.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} hello.cr

run: _build/hello
	./_build/hello

clean:
	rm -rf _build

Added src/ffi-adventure/hello-world/hello-world-crystal/hello.cr.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
@[Link(lib: "hello", ldflags: "-L#{__DIR__}/../hello-world-c/_build")]
lib LibHello
  fun hello_world = helloWorld()
  fun hello(name : UInt8*)
end

LibHello.hello_world()
LibHello.hello("Crystal")

Added src/ffi-adventure/hello-world/hello-world-d/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
_build/hello: hello.d
	ldmd2 -L=-L../hello-world-c/_build -L=-lhello -of=${.TARGET} ${.ALLSRC}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build

Added src/ffi-adventure/hello-world/hello-world-d/hello.d.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
extern (C) void hello(immutable char *);

import std.string;

void main() {
  hello(std.string.toStringz("D"));
}

Added src/ffi-adventure/hello-world/hello-world-elixir/.formatter.exs.









>
>
>
>
1
2
3
4
# Used by "mix format"
[
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]

Added src/ffi-adventure/hello-world/hello-world-elixir/Makefile.





















>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
_build/hello_nif.so: ext/hello_nif.c
	@mkdir -p ${.TARGET:H}
	clang -o _build/hello_nif.o -I/usr/local/lib/erlang/usr/include -I../hello-world-c -c -fPIC ${.ALLSRC}
	clang -shared -o ${.TARGET} -L../hello-world-c/_build -lhello _build/hello_nif.o

run: _build/hello_nif.so
	./hello

clean:
	rm -rf _build

Added src/ffi-adventure/hello-world/hello-world-elixir/README.md.











































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
# HelloWorld

**TODO: Add description**

## Installation

If [available in Hex](https://hex.pm/docs/publish), the package can be installed
by adding `hello_world` to your list of dependencies in `mix.exs`:

```elixir
def deps do
  [
    {:hello_world, "~> 0.1.0"}
  ]
end
```

Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at <https://hexdocs.pm/hello_world>.

Added src/ffi-adventure/hello-world/hello-world-elixir/ext/hello_nif.c.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
#include <erl_nif.h>
#include "libhello.h"

#define MAXBUFLEN 1024

static ERL_NIF_TERM hello_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) {
  unsigned *len;
  char name[MAXBUFLEN];
  enif_get_string(env, argv[0], name, MAXBUFLEN, ERL_NIF_LATIN1);
  hello(name);
  return enif_make_atom(env, "ok");
}

static ErlNifFunc nif_funcs[] = {
  {"hello", 1, hello_nif}
};

ERL_NIF_INIT(Elixir.HelloWorld, nif_funcs, NULL, NULL, NULL, NULL)

Added src/ffi-adventure/hello-world/hello-world-elixir/hello.































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
#!/usr/bin/env elixir

# Load the C NIF built into ./_build/hello_nif.so and call it from Elixir.
defmodule HelloWorld do
  @on_load :load_nifs

  # Loading the NIF replaces the hello/1 stub below with the C implementation.
  def load_nifs do
    :erlang.load_nif('./_build/hello_nif', 0)
  end

  def hello, do: :ok = hello('Elixir') # erlang NIF requires a charlist

  # Fallback stub; only reached if the NIF failed to load.
  def hello(_name), do: raise "NIF hello/1 not implemented"
end

HelloWorld.hello()

Added src/ffi-adventure/hello-world/hello-world-go/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
_build/hello: hello.go ../hello-world-c/_build/libhello.a
	go build -o ${.TARGET} ${.ALLSRC:M*.go}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build

Added src/ffi-adventure/hello-world/hello-world-go/hello.go.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
package main

/*
#cgo CFLAGS: -I../hello-world-c
#cgo LDFLAGS: -L../hello-world-c/_build -lhello
#include <stdlib.h>
#include "libhello.h"
*/
import "C"
import "unsafe"

func main() {
	hello()
}

func hello() {
	name := C.CString("Go")
	defer C.free(unsafe.Pointer(name))
	C.hello(name)
}

Added src/ffi-adventure/hello-world/hello-world-nim/Makefile.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
PATH:= ${PATH}:/usr/local/nim/bin
.export PATH

_build/hello: hello.nim
	nim c -o:hello --outdir:_build --cincludes:../hello-world-c --clibdir:../hello-world-c/_build -l:-lhello hello.nim

run: _build/hello
	./_build/hello

clean:
	rm -rf _build

Added src/ffi-adventure/hello-world/hello-world-nim/hello.nim.









>
>
>
>
1
2
3
4
# header may be optional since it can find the symbol in lib
proc hello(name: cstring) {.header: "libhello.h", importc: "hello"}

hello("Nim")

Added src/ffi-adventure/hello-world/hello-world-pony/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
_build/hello: hello.pony
	ponyc -o ${.TARGET:H} -b ${.TARGET:T} .

run: _build/hello
	./_build/hello

clean:
	rm -rf _build

Added src/ffi-adventure/hello-world/hello-world-pony/hello.pony.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
use "path:../hello-world-c/_build"
use "lib:hello"
use @hello[None](name: Pointer[U8] tag)

actor Main
  new create(env: Env) =>
    @hello("Pony".cstring())

Added src/ffi-adventure/hello-world/hello-world-rust/.cargo/config.toml.





>
>
1
2
[build]
target-dir = "_build"

Added src/ffi-adventure/hello-world/hello-world-rust/.gitignore.



>
1
/target

Added src/ffi-adventure/hello-world/hello-world-rust/Cargo.lock.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "hello-world-rust"
version = "0.1.0"

Added src/ffi-adventure/hello-world/hello-world-rust/Cargo.toml.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
[package]
name = "hello-world-rust"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

Added src/ffi-adventure/hello-world/hello-world-rust/Makefile.





















>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
.PHONY: all run clean

all:
	cargo build

run:
	cargo run

clean:
	cargo clean

Added src/ffi-adventure/hello-world/hello-world-rust/build.rs.









>
>
>
>
1
2
3
4
fn main() {
    println!("cargo:rustc-link-search=../hello-world-c/_build");
    println!("cargo:rustc-link-lib=hello");
}

Added src/ffi-adventure/hello-world/hello-world-rust/src/main.rs.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
use std::ffi::CString;
use std::os::raw::c_char;

fn main() {
    myhello()
}

fn myhello() {
    unsafe {
        let name = CString::new("Rust").expect("CString::new failed");
        hello(name.as_ptr());
    }
}

#[link(name = "hello", kind = "static")]
extern "C" {
    fn hello(name: *const c_char);
}

Added src/ffi-adventure/hello-world/hello-world-v/Makefile.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
_build/hello: hello.v
	@mkdir -p ${.TARGET:H}
	v -o ${.TARGET} ${.ALLSRC}

run: _build/hello
	./_build/hello

clean:
	rm -rf _build

Added src/ffi-adventure/hello-world/hello-world-v/hello.v.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
#flag -lhello
#flag -L../hello-world-c/_build
#flag -I../hello-world-c
#include "libhello.h"

fn C.hello(&char) int

fn main() {
  C.hello("V".str)
}

Added src/ffi-adventure/hello-world/hello-world-zig/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
zig-out/hello: *.zig
	zig build

run:
	zig build run

clean:
	rm -rf zig-cache zig-out

Added src/ffi-adventure/hello-world/hello-world-zig/build.zig.











































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
const std = @import("std");

pub fn build(b: *std.build.Builder) void {
    // Standard target options allows the person running `zig build` to choose
    // what target to build for. Here we do not override the defaults, which
    // means any target is allowed, and the default is native. Other options
    // for restricting supported target set are available.
    const target = b.standardTargetOptions(.{});

    // Standard release options allow the person running `zig build` to select
    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
    const mode = b.standardReleaseOptions();

    const exe = b.addExecutable("hello", "src/main.zig");
    exe.setTarget(target);
    exe.setBuildMode(mode);
    exe.addIncludePath("../hello-world-c");
    exe.addLibraryPath("../hello-world-c/_build");
    exe.linkSystemLibrary("hello");
    exe.install();

    const run_cmd = exe.run();
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |args| {
        run_cmd.addArgs(args);
    }

    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    const exe_tests = b.addTest("src/main.zig");
    exe_tests.setTarget(target);
    exe_tests.setBuildMode(mode);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&exe_tests.step);
}

Added src/ffi-adventure/hello-world/hello-world-zig/src/main.zig.





















>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
const std = @import("std");

const c = @cImport({
  @cInclude("libhello.h");
});

pub fn main() !void {
  c.hello("Zig");
}

Added src/ffi-adventure/libucl/Justfile.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
test:
  #!/bin/sh
  mkdir -p _build
  for j in */Justfile; do
    impl=$(dirname $j)
    just -f $j run > _build/$impl
    cmp expected _build/$impl
  done

@clean:
  rm -rf _build
  for j in */Justfile; do just -f $j clean; done

Added src/ffi-adventure/libucl/data.ucl.











>
>
>
>
>
1
2
3
4
5
greeting = "hello world"
vars {
  foo = "this is foo"
  bar = "this is bar"
}

Added src/ffi-adventure/libucl/expected.







>
>
>
1
2
3
hello world
this is foo
this is bar

Added src/ffi-adventure/libucl/libucl-c/Justfile.













>
>
>
>
>
>
1
2
3
4
5
6
@run:
  make > /dev/null
  ./_build/libucl-c

@clean:
  rm -rf _build

Added src/ffi-adventure/libucl/libucl-c/Makefile.







>
>
>
1
2
3
_build/libucl-c: libucl_c.c
	@mkdir -p ${.TARGET:H}
	cc -I/usr/local/include -L/usr/local/lib -lucl -Wall -Werror -o ${.TARGET} ${.ALLSRC}

Added src/ffi-adventure/libucl/libucl-c/libucl_c.c.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
#include <stdio.h>
#include <ucl.h>

/* Parse ../data.ucl with libucl and print greeting, vars.foo and vars.bar,
 * one per line. Exits non-zero if the file cannot be parsed or a key is
 * missing. */
int main() {
  ucl_object_t* ucl = NULL;
  const char* greeting = NULL;
  const char* foo = NULL;
  const char* bar = NULL;
  int rc = 0;

  struct ucl_parser* parser = ucl_parser_new(UCL_PARSER_DEFAULT);
  ucl_parser_add_file(parser, "../data.ucl");
  if((ucl = ucl_parser_get_object(parser))) {
    ucl_object_tostring_safe(ucl_object_lookup(ucl, "greeting"), &greeting);
    ucl_object_tostring_safe(ucl_object_lookup_path(ucl, "vars.foo"), &foo);
    ucl_object_tostring_safe(ucl_object_lookup_path(ucl, "vars.bar"), &bar);
  }

  /* Guard against NULL: the original passed the pointers straight to
   * puts(), which is undefined behavior when parsing failed or a key
   * was absent. */
  if (greeting && foo && bar) {
    puts(greeting);
    puts(foo);
    puts(bar);
  } else {
    fprintf(stderr, "failed to read ../data.ucl\n");
    rc = 1;
  }

  /* Release libucl resources (the original leaked both). */
  if (ucl) ucl_object_unref(ucl);
  if (parser) ucl_parser_free(parser);
  return rc;
}

Added src/ffi-adventure/libucl/libucl-cpp/Justfile.













>
>
>
>
>
>
1
2
3
4
5
6
@run:
  make > /dev/null
  ./_build/libucl-cpp

@clean:
  rm -rf _build

Added src/ffi-adventure/libucl/libucl-cpp/Makefile.







>
>
>
1
2
3
_build/libucl-cpp: libucl_cpp.cpp
	@mkdir -p ${.TARGET:H}
	c++ -I/usr/local/include -L/usr/local/lib -lucl -Wall -Werror -o ${.TARGET} ${.ALLSRC}

Added src/ffi-adventure/libucl/libucl-cpp/libucl_cpp.cpp.

















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
#include <iostream>
extern "C"
{
#include <ucl.h>
}

// Parse ../data.ucl with libucl and print greeting, vars.foo and vars.bar,
// one per line. Exits non-zero if parsing fails or a key is missing.
int main() {
  ucl_object_t* ucl = NULL;
  const char* greeting = NULL;
  const char* foo = NULL;
  const char* bar = NULL;
  int rc = 0;

  struct ucl_parser* parser = ucl_parser_new(UCL_PARSER_DEFAULT);
  ucl_parser_add_file(parser, "../data.ucl");
  if((ucl = ucl_parser_get_object(parser))) {
    ucl_object_tostring_safe(ucl_object_lookup(ucl, "greeting"), &greeting);
    ucl_object_tostring_safe(ucl_object_lookup_path(ucl, "vars.foo"), &foo);
    ucl_object_tostring_safe(ucl_object_lookup_path(ucl, "vars.bar"), &bar);
  }

  // Guard against NULL: streaming a null char* into std::cout is undefined
  // behavior, which the original risked when parsing failed or a key was
  // absent.
  if (greeting && foo && bar) {
    std::cout << greeting << "\n";
    std::cout << foo << "\n";
    std::cout << bar << "\n";
  } else {
    std::cerr << "failed to read ../data.ucl\n";
    rc = 1;
  }

  // Release libucl resources (the original leaked both).
  if (ucl) ucl_object_unref(ucl);
  if (parser) ucl_parser_free(parser);
  return rc;
}

Added src/ffi-adventure/libucl/libucl-go/Justfile.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
@run:
  mkdir -p _build
  go build -o _build/libucl-go *.go
  ./_build/libucl-go

@clean:
  rm -rf _build

Added src/ffi-adventure/libucl/libucl-go/libucl_go.go.



























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
package main

/*
#cgo CFLAGS: -I/usr/local/include
#cgo LDFLAGS: -L/usr/local/lib -lucl
#include <ucl.h>
*/
import "C"
import "fmt"
//import "unsafe"

// Parse ../data.ucl via libucl (cgo) and print greeting, vars.foo and
// vars.bar, one per line.
//
// NOTE(review): every C.CString here allocates C-heap memory that is never
// freed, and the parser/object are never released — fine for a one-shot
// demo, but confirm that's intentional. Also, if parsing fails the three
// pointers stay nil and C.GoString(nil) is unsafe — verify against the
// cgo documentation.
func main() {
  var greeting *C.char
  var foo *C.char
  var bar *C.char

  var parser = C.ucl_parser_new(C.UCL_PARSER_DEFAULT)
  C.ucl_parser_add_file(parser, C.CString("../data.ucl"))
  var ucl = C.ucl_parser_get_object(parser)
  if(ucl != nil) {
    C.ucl_object_tostring_safe(C.ucl_object_lookup(ucl, C.CString("greeting")), &greeting)
    C.ucl_object_tostring_safe(C.ucl_object_lookup_path(ucl, C.CString("vars.foo")), &foo)
    C.ucl_object_tostring_safe(C.ucl_object_lookup_path(ucl, C.CString("vars.bar")), &bar)
  }

  fmt.Println(C.GoString(greeting))
  fmt.Println(C.GoString(foo))
  fmt.Println(C.GoString(bar))
}

Added src/ffi-adventure/libucl/libucl-zig/Justfile.











>
>
>
>
>
1
2
3
4
5
@run:
  zig build run

@clean:
  rm -rf _build

Added src/ffi-adventure/libucl/libucl-zig/build.zig.











































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
const std = @import("std");

pub fn build(b: *std.build.Builder) void {
    // Standard target options allows the person running `zig build` to choose
    // what target to build for. Here we do not override the defaults, which
    // means any target is allowed, and the default is native. Other options
    // for restricting supported target set are available.
    const target = b.standardTargetOptions(.{});

    // Standard release options allow the person running `zig build` to select
    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
    const mode = b.standardReleaseOptions();

    const exe = b.addExecutable("libucl-zig", "src/main.zig");
    exe.setTarget(target);
    exe.setBuildMode(mode);
    exe.addIncludePath("/usr/local/include");
    exe.addLibraryPath("/usr/local/lib");
    exe.linkSystemLibrary("ucl");
    exe.install();

    const run_cmd = exe.run();
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |args| {
        run_cmd.addArgs(args);
    }

    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    const exe_tests = b.addTest("src/main.zig");
    exe_tests.setTarget(target);
    exe_tests.setBuildMode(mode);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&exe_tests.step);
}

Added src/ffi-adventure/libucl/libucl-zig/src/main.zig.























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
const std = @import("std");

const ucl = @cImport({
  @cInclude("ucl.h");
});

pub fn main() !void {
    var parser: ?*ucl.ucl_parser = ucl.ucl_parser_new(ucl.UCL_PARSER_DEFAULT);
    var ucl_obj: ?*ucl.ucl_object_t = null;
    var greeting: [*c] const u8 = null;
    var foo: [*c] const u8 = null;
    var bar: [*c] const u8 = null;

    _ = ucl.ucl_parser_add_file(parser, "../data.ucl");
    ucl_obj = ucl.ucl_parser_get_object(parser);
    if(ucl_obj != null) {
      _ = ucl.ucl_object_tostring_safe(ucl.ucl_object_lookup(ucl_obj, "greeting"), &greeting);
      _ = ucl.ucl_object_tostring_safe(ucl.ucl_object_lookup_path(ucl_obj, "vars.foo"), &foo);
      _ = ucl.ucl_object_tostring_safe(ucl.ucl_object_lookup_path(ucl_obj, "vars.bar"), &bar);
    }

    const stdout_file = std.io.getStdOut().writer();
    var bw = std.io.bufferedWriter(stdout_file);
    const stdout = bw.writer();
    try stdout.print("{s}\n{s}\n{s}\n", .{greeting, foo, bar});
    try bw.flush();
}

Added src/ffi-adventure/libucl/uclcmd/Justfile.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
run:
  #!/bin/sh
  for v in greeting vars.foo vars.bar; do
    uclcmd get -q -f ../data.ucl $v
  done

clean:

Added src/ffi-adventure/memleak/memleak-go/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
_build/memleak: memleak.go
	go build -o ${.TARGET} ${.ALLSRC:M*.go}

run: _build/memleak
	./_build/memleak

clean:
	rm -rf _build

Added src/ffi-adventure/memleak/memleak-go/memleak.go.

































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
package main

/*
This attempts to intentionally leak memory, by allocating a
C string but not freeing it when done.

It appears to not be necessary to free the memory, the
garbage collector will pick it up anyway.

NOTE(review): per the cgo documentation, memory allocated by C.CString
lives on the C heap and is the caller's responsibility to free; the Go
garbage collector does not track it. The observation above likely
reflects runtime.MemStats counting only the Go heap — verify with an
OS-level RSS measurement before relying on it.
*/

/*
#include <stdlib.h>
*/
import "C"

import (
"fmt"
"math"
"runtime"
"runtime/debug"
"time"
//"unsafe"
)

// Allocate a million C strings without freeing them, then report Go heap
// stats before and after a forced GC. Automatic GC is disabled up front so
// only the explicit runtime.GC() call collects.
func main() {
	debug.SetGCPercent(-1)
	debug.SetMemoryLimit(math.MaxInt64)

	numstrings := 1000000;
	for i := 0; i < numstrings; i++ {
		makestring(i)
	}
	fmt.Printf("%v strings made\n", numstrings)

	fmt.Println("Before garbage collection:")
	reportMem()

	runtime.GC()

	fmt.Println("after garbage collection:")
	reportMem()
}

// makestring allocates one C string and deliberately does not free it
// (the commented-out defer is the fix being experimented with).
func makestring(i int) {
	C.CString(fmt.Sprintf("string %v", i));
	//  defer C.free(unsafe.Pointer(s));
}

// reportMem prints Go heap stats three times, 3s apart, so the numbers can
// be watched alongside an external process monitor.
func reportMem() {
	loops := 3
	for i := 0; i < loops; i++ {
		fmt.Printf("==== %v/%v\n", i+1, loops)
		printmem()
		time.Sleep(3 * time.Second)
	}
}
// printmem dumps the Go-heap counters relevant to the experiment.
func printmem() {
	var m runtime.MemStats
	runtime.ReadMemStats(&m)
	fmt.Printf("HeapAlloc = %v\n", m.HeapAlloc)
	fmt.Printf("HeapInuse = %v\n", m.HeapInuse)
	fmt.Printf("HeapObjects = %v\n", m.HeapObjects)
	fmt.Printf("NumGC = %v\n", m.NumGC)
}

Added src/ffi-adventure/upcase/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
# Aggregate Makefile: drive each upcase-*/ sub-make.
# The `|| exit 1` makes the shell for-loop stop on the first failing
# sub-make; without it a failure mid-loop is silently ignored and the
# target still reports success.
all:
	for m in upcase-*/Makefile; do ${MAKE} -C $$(dirname $$m) || exit 1; done

# Run every implementation, printing a "=== dir:" header before each.
run:
	for m in upcase-*/Makefile; do echo "=== $$(dirname $$m):"; ${MAKE} -C $$(dirname $$m) run || exit 1; done

clean:
	for m in upcase-*/Makefile; do ${MAKE} -C $$(dirname $$m) clean || exit 1; done

Added src/ffi-adventure/upcase/upcase-c/Makefile.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
# Build the C demo: a static libupcase archive plus an executable linked
# against it. The source must come BEFORE -lupcase on the link line:
# linkers pull members out of a static archive only to satisfy symbols
# already undefined when the archive is scanned, so `-lupcase upcase.c`
# can fail with unresolved upcase()/upcase_n().
_build/upcase: upcase.c _build/libupcase.a libupcase.h
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror upcase.c -L_build -lupcase

_build/libupcase.a: libupcase.c libupcase.h
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror -c -fPIC libupcase.c

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build

Added src/ffi-adventure/upcase/upcase-c/libupcase.c.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
#include <ctype.h>
#include "libupcase.h"

/* Upper-case the NUL-terminated string s in place. Returns 1 (success). */
int upcase(char *s) {
  while(*s) {
    /* Cast through unsigned char: passing a negative char (any byte >= 0x80
     * where char is signed) to toupper() is undefined behavior. */
    *s = (char) toupper((unsigned char) *s);
    s++;
  }
  return 1;
}

/* Upper-case the first len bytes of s in place. Does NOT stop at a NUL;
 * the caller must guarantee s has at least len writable bytes.
 * Returns 1 (success). */
int upcase_n(char *s, int len) {
  int i = 0;
  while(i++ < len) {
    *s = (char) toupper((unsigned char) *s);
    s++;
  }
  return 1;
}

Added src/ffi-adventure/upcase/upcase-c/libupcase.h.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
#include <ctype.h>

#ifndef libupcase_h
#define libupcase_h

/* Upper-case a NUL-terminated string in place; returns 1 on success. */
int upcase(char *);
/* Upper-case the first `len` bytes in place (no NUL check); returns 1. */
int upcase_n(char *, int);

#endif

Added src/ffi-adventure/upcase/upcase-c/upcase.c.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "libupcase.h"

/* Demo driver: upper-case a heap copy of "hello c" via libupcase and print
 * it. upcase() mutates its argument in place, so a writable strdup'd copy
 * is used rather than a string literal.
 * NOTE(review): strdup's return is not checked; upcase(NULL) would crash
 * on allocation failure. */
int main() {
  char *s = strdup("hello c");
  if(upcase(s)) {
    printf("%s\n", s);
  }
  free(s);
  return 0;
}

Added src/ffi-adventure/upcase/upcase-crystal/Makefile.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
_build/upcase: upcase.cr
	@mkdir -p ${.TARGET:H}
	crystal build -o ${.TARGET} upcase.cr

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build

Added src/ffi-adventure/upcase/upcase-crystal/upcase.cr.























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
@[Link(lib: "upcase", ldflags: "-L#{__DIR__}/../upcase-c/_build")]
lib LibUpcase
  fun upcase(name : UInt8*) : UInt32
  fun upcase_n(name : UInt8*, len : UInt32) : UInt32
end

class CString
  @bytes : Slice(UInt8)

  def initialize(str : String)
    @bytes = Slice(UInt8).new(str.size + 1)
    str.to_slice.copy_to(@bytes)
    @bytes[-1] = 0
  end

  def to_unsafe
    @bytes.to_unsafe
  end

  def to_s(io)
    io << String.new(@bytes)
  end
end

str = CString.new("hello crystal")
LibUpcase.upcase(str)
puts str

Added src/ffi-adventure/upcase/upcase-d/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
_build/upcase: upcase.d
	ldmd2 -L=-L../upcase-c/_build -L=-lupcase -of=${.TARGET} ${.ALLSRC}

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build

Added src/ffi-adventure/upcase/upcase-d/upcase.d.























>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
extern (C) void upcase(char *);

import std.stdio;
import std.string;

void main() {
  auto s = "hello d";
  auto cs = cast (char *) std.string.toStringz(s);
  upcase(cs);
  writeln(std.string.fromStringz(cs));
}

Added src/ffi-adventure/upcase/upcase-go/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
_build/upcase: upcase.go
	go build -o ${.TARGET} ${.ALLSRC:M*.go}

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build

Added src/ffi-adventure/upcase/upcase-go/upcase.go.







































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
package main

/*
#cgo CFLAGS: -I../upcase-c
#cgo LDFLAGS: -L../upcase-c/_build -lupcase
#include <stdlib.h>
#include "libupcase.h"
*/
import "C"
import "unsafe"
import "fmt"

// Copy a Go string into a C buffer, upper-case it in place via the C
// upcase() function, and print the result.
func main() {
	hello := C.CString("hello go")
	// C.CString allocates on the C heap; free it explicitly when done.
	defer C.free(unsafe.Pointer(hello))

	C.upcase(hello)
	fmt.Println(C.GoString(hello))
}

Added src/ffi-adventure/upcase/upcase-nim/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
_build/upcase: upcase.nim
	nim c -o:upcase --outdir:_build --cincludes:../upcase-c --clibdir:../upcase-c/_build -l:-lupcase upcase.nim

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build

Added src/ffi-adventure/upcase/upcase-nim/upcase.nim.













>
>
>
>
>
>
1
2
3
4
5
6
# header may be optional since it can find the symbol in lib
proc upcase(name: cstring) {.header: "libupcase.h", importc: "upcase"}

var s = "hello nim"
upcase(cstring(s))
echo(s)

Added src/ffi-adventure/upcase/upcase-pony/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
_build/upcase: upcase.pony
	ponyc -o ${.TARGET:H} -b ${.TARGET:T} .

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build

Added src/ffi-adventure/upcase/upcase-pony/upcase.pony.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
use "path:../upcase-c/_build"
use "lib:upcase"
use @upcase[I32](name: Pointer[U8] tag)

actor Main
  new create(env: Env) =>
    var s: String = recover "hello pony".clone() end
    @upcase(s.cstring())
    env.out.print(s)

Added src/ffi-adventure/upcase/upcase-rust/.cargo/config.toml.





>
>
1
2
[build]
target-dir = "_build"

Added src/ffi-adventure/upcase/upcase-rust/.gitignore.



>
1
/target

Added src/ffi-adventure/upcase/upcase-rust/Cargo.lock.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "upcase-rust"
version = "0.1.0"

Added src/ffi-adventure/upcase/upcase-rust/Cargo.toml.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
[package]
name = "upcase-rust"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

Added src/ffi-adventure/upcase/upcase-rust/Makefile.





















>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
.PHONY: all run clean

all:
	cargo build

run:
	cargo run

clean:
	cargo clean

Added src/ffi-adventure/upcase/upcase-rust/build.rs.









>
>
>
>
1
2
3
4
fn main() {
    println!("cargo:rustc-link-search=../upcase-c/_build");
    println!("cargo:rustc-link-lib=upcase");
}

Added src/ffi-adventure/upcase/upcase-rust/src/main.rs.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
fn main() {
    let mut s = String::from("hello rust");
    do_upcase(&mut s);
    println!("{}", s);
}

/// Upper-case `s` in place by handing its byte buffer to the C `upcase`
/// function from the static libupcase library.
fn do_upcase(s: &mut String) {
    unsafe {
        let vec = s.as_mut_vec();
        // C expects a NUL-terminated string, so temporarily append a 0 byte...
        vec.reserve(1);
        vec.push(0);
        upcase(vec.as_mut_ptr());
        // ...and pop it again so the String's length is unchanged.
        // (Safe only because ASCII upper-casing never produces a 0 byte,
        // leaving the buffer valid UTF-8.)
        vec.pop();
    };
}

#[link(name = "upcase", kind = "static")]
extern "C" {
    fn upcase(s: *mut u8);
}

Added src/ffi-adventure/upcase/upcase-v/Makefile.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
_build/upcase: upcase.v
	@mkdir -p ${.TARGET:H}
	v -o ${.TARGET} ${.ALLSRC}

run: _build/upcase
	./_build/upcase

clean:
	rm -rf _build

Added src/ffi-adventure/upcase/upcase-v/upcase.v.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
#flag -lupcase
#flag -L../upcase-c/_build
#flag -I../upcase-c
#include "libupcase.h"

fn C.upcase(&char) int

fn main() {
  s := "hello v".clone()
  C.upcase(s.str)
  println(s)
}

Added src/fossil-examples/README.md.

























>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
# Fossil Examples

Test scripts to demonstrate Fossil behavior.

## Move file in merge (fossil-move-file-in-merge.sh)

Q: How does Fossil display file moves in merge commits?

A: "Name change from `orig_name` to `subdir/new_name`"

I was concerned that the merge commit would contain the entire contents of the new file.
Instead, it simply shows the old name and new name.

Added src/fossil-examples/move-file-in-merge.sh.



















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
#!/bin/sh
# Demonstrate how Fossil records a file move in a merge commit: move a file
# on one branch, then merge another branch into trunk and inspect the result.
set -e
work=$(mktemp -d -t fossil-move-file-in-merge)

# Quote every expansion of $work: mktemp -t paths can contain spaces
# depending on the platform's temp-dir configuration.
cd "$work"
fossil new f.fossil
fossil open f.fossil

echo 'file1' > file1
fossil add file1
fossil commit -m 'add file1'

echo 'file2' > file2
fossil add file2
fossil commit --branch other -m 'add file2'

mkdir subdir
fossil mv --hard file1 subdir/
fossil commit -m 'move file1 to subdir'

fossil up trunk
fossil merge other
fossil commit -m 'merge other'

# Print the scratch directory so it can be inspected (and removed) manually.
echo "$work"

Added src/fossil-import-test.sh.



















































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
#!/bin/sh
set -e

# Keep a fossil repo current with a git repo: import a git repo into a new
# fossil repo, add commits, re-import incrementally, then rename the
# imported "main" branch to fossil's conventional "trunk".
# All path/hash expansions are quoted so space-containing temp dirs work.

d=$(mktemp -d -t fossil-git)
f="$d/g.fossil"
cd "$d"

mkdir g
cd g
git init
git commit -m '0' --allow-empty

# Initial full import.
git fast-export --all | fossil import --git "$f"

echo 'foo 1' >> foo
echo 'bar 1' >> bar
git add foo bar
git commit -m '1'

echo 'foo 2' >> foo
echo 'bar 2' >> bar
git add foo bar
git commit -m '2'

# Incremental import (-i) picks up only the new commits.
git fast-export --all | fossil import --git -i "$f"

echo "=== after import"
fossil timeline -R "$f"

echo "=== amend main to trunk"
# First two commits on the imported "main" branch, oldest first.
commits=$(fossil sql -R "$f" "SELECT a.hash FROM artifact a JOIN tagxref x ON a.rid=x.rid JOIN tag t ON t.tagid=x.tagid JOIN event e ON a.rid=e.objid WHERE t.tagname='sym-main' ORDER BY a.rid ASC LIMIT 2;" | tr -d \')
first=$(echo "$commits" | head -n 1)
second=$(echo "$commits" | tail -n 1)

fossil amend "$second" --date '2000-01-01' -R "$f"
fossil amend "$first" --branch trunk -R "$f"
fossil timeline -R "$f"
fossil branch list -R "$f"
echo "$d"

Added src/frank/Kyuafile.











>
>
>
>
>
1
2
3
4
5
syntax(2)

test_suite('frank')

include('tests/Kyuafile')

Added src/frank/README.md.



























































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# frank - mash branches together

## Usage

```
$ frank status

branch1 up to date
branch2 out of date
```

```
$ frank status

branch1 up to date

$ frank fetch
$ frank status

branch1 out of date

$ frank up

branch 1 up to date
```

## Config

```
remotes {
  freebsd {
    url: "https://github.com/freebsd/freebsd-ports.git"
  }

  patmaddox {
    url: "git@github.com:patmaddox/freebsd-ports.git"
  }
}

branches {
  main {
    from = "freebsd"
	to = "patmaddox"
	strategy = "ff"
  }
  
  patmaddox-ports {
    base = "main"
	remote = "patmaddox"
	strategy = "reset"
	branches = [
	  "poudriere-devel",
	  "www-vultr-cli",
	]
  }
  
  poudriere-devel {
    base = "main"
	remote = "patmaddox"
	strategy = "rebase"
  }
  
  www-vultr-cli {
    base = "main"
	remote = "patmaddox"
	strategy = "rebase"
  }
}
```

## Testing

`kyua test`

If a test fails:

`kyua debug test_suite.sh:test_case`

Added src/frank/bin/frank.





















































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
#!/bin/sh
set -e
cmd=${1:-usage}

cmd_status() {
    # Print "<branch> <status>" for every configured branch and exit 1
    # if any branch is not "ok".  A missing or empty .frankconfig is not
    # an error: the command silently prints nothing and exits 0.
    if [ -s .frankconfig ]; then
	dir_error=0
	for b in $(branches); do
	    b_stat=$(branch_status "$b")
	    echo "$b $b_stat"
	    # Quoted comparison: the original unquoted test would break
	    # if the status were empty or contained whitespace.
	    if [ "$b_stat" != "ok" ]; then
		dir_error=1
	    fi
	done

	if [ "$dir_error" -eq 1 ]; then exit 1; fi
    fi
}

cmd_up() {
    # Materialize every configured branch as a directory: "main" is a
    # fresh clone of its configured remote; every other branch becomes a
    # git worktree of main.  Branches whose directory exists are skipped.
    for b in $(branches); do
	if [ ! -d "$b" ]; then
	    if [ "$b" = "main" ]; then
		git clone "$(remote "$(branch_remote "$b")")" "$b"
	    else
		# Subshell keeps the cwd change local: the original code
		# did "cd main" and never returned, so every later loop
		# iteration ran inside main/ and tested the wrong paths.
		( cd main && git worktree add "../$b" )
	    fi
	fi
    done
}

# Print the configured branch names (the .branches key of .frankconfig).
# NOTE(review): uclget keeps only the first line of uclcmd output --
# confirm all branch names arrive on a single line, otherwise only the
# first branch is ever reported.
branches() {
    uclget .branches
}

branch_status() {
    # Report "ok" when the branch's working directory exists in the
    # current directory, "missing" otherwise.
    b=${1:?missing branch param}
    # Quoted: an unquoted $b would mis-parse names with whitespace.
    if [ -d "$b" ]; then
	echo ok
    else
	echo missing
    fi
}

# Print the remote name configured for branch $1 in .frankconfig.
branch_remote() {
    uclget .branches.${1:?missing branch name}.remote
}

# Print the clone source (URL or path) configured for remote $1.
remote() {
    uclget .remotes.${1:?missing remote name}
}

uclget() {
    # Query .frankconfig for a UCL key, keep only the first line of the
    # result, and strip one pair of surrounding double quotes.
    uclcmd get -l -e -f .frankconfig ${1:?missing ucl key} \
	| sed -e '1!d' -e 's/^"//' -e 's/"$//'
}

# Dispatch to cmd_<subcommand>.  Unknown subcommands -- including the
# implicit "usage" default when no argument was given -- previously died
# with a cryptic "cmd_usage: not found"; print a usage line instead.
if type "cmd_$cmd" >/dev/null 2>&1; then
    "cmd_$cmd"
else
    echo "usage: frank status|up" >&2
    exit 64
fi

Added src/frank/tests/Kyuafile.













>
>
>
>
>
>
1
2
3
4
5
6
-- Kyuafile for the frank test suite: registers the ATF shell test
-- programs in this directory with kyua(1).
syntax(2)

test_suite('frank')

atf_test_program{name='test_status.sh'}
atf_test_program{name='test_up.sh'}

Added src/frank/tests/test_status.sh.



























































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
#! /usr/bin/env atf-sh
# ATF tests for "frank status".
set -e

# Path to the frank script under test, relative to this test's srcdir.
frank=$(atf_get_srcdir)/../bin/frank

## test cases
# Register every test case with the ATF runner.
atf_init_test_cases() {
    atf_add_test_case no_config
    atf_add_test_case empty_config
    atf_add_test_case branch_dir_present
    atf_add_test_case branch_dir_not_present
}

## no config
# With no .frankconfig at all, "frank status" prints nothing and exits 0.
atf_test_case no_config

no_config_body() {
    # (Removed a leftover "echo $frank" debug line that polluted the
    # test body's stdout.)
    atf_check -s exit:0 -o empty $frank status
}

## empty config
# An empty .frankconfig fails frank's "-s" (non-empty) check, so status
# prints nothing and exits 0.
atf_test_case empty_config

empty_config_body() {
    touch .frankconfig
    atf_check -s exit:0 -o empty $frank status
}

## branch dir present
# Two configured branches whose directories exist: both report "ok" and
# frank exits 0.
atf_test_case branch_dir_present

branch_dir_present_body() {
    mkdir present1 present2
    cat <<EOF > .frankconfig
branches {
  present1 {}
  present2 {}
}
EOF

    atf_check -s exit:0 -o match:"present1 ok" $frank status
    atf_check -s exit:0 -o match:"present2 ok" $frank status
}

## branch dir not present
# One existing and one missing branch directory: per-branch status lines
# are still printed, but frank exits 1 overall.
atf_test_case branch_dir_not_present

branch_dir_not_present_body() {
    mkdir present

    cat <<EOF > .frankconfig
branches {
  present {}
  not_present {}
}
EOF

    atf_check -s exit:1 -o match:"present ok" $frank status
    atf_check -s exit:1 -o match:"not_present missing" $frank status
}

Added src/frank/tests/test_up.sh.







































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
#! /usr/bin/env atf-sh
# ATF tests for "frank up".
set -e

# Path to the frank script under test, relative to this test's srcdir.
frank=$(atf_get_srcdir)/../bin/frank

## test cases
# Register every test case with the ATF runner.
atf_init_test_cases() {
    atf_add_test_case main
    atf_add_test_case worktree
    atf_add_test_case double_up
}

## main
# "frank up" clones the main branch from its configured remote (the
# local ./source-repo created by git_hello), flipping its status from
# "missing" to "ok".
atf_test_case main

main_body() {
    cat <<EOF > .frankconfig
remotes {
  local = "source-repo"
}

branches {
  main {
    remote = "local"
  }
}
EOF
    
    git_hello

    atf_check -s exit:1 -o match:"main missing" $frank status
    atf_check -s exit:0 -e ignore $frank up
    atf_check -s exit:0 -o match:"main ok" $frank status
}

## worktree
# A non-main branch (branch1) is created as a git worktree of main by
# "frank up".
atf_test_case worktree

worktree_body() {
    cat <<EOF > .frankconfig
remotes {
  local = "source-repo"
}

branches {
  main {
    remote = "local"
  }

  branch1 {}
}
EOF
    
    git_hello

    atf_check -s exit:1 -o match:"branch1 missing" $frank status
    atf_check -s exit:0 -o ignore -e ignore $frank up
    atf_check -s exit:0 -o match:"branch1 ok" $frank status
}

## double_up
# Running "frank up" twice is idempotent: the second run does nothing
# (and, with no -o ignore, is expected to produce no output).
atf_test_case double_up

double_up_body() {
    cat <<EOF > .frankconfig
remotes {
  local = "source-repo"
}

branches {
  main {
    remote = "local"
  }
}
EOF
    
    git_hello

    atf_check -s exit:1 -o match:"main missing" $frank status
    atf_check -s exit:0 -o ignore -e ignore $frank up
    atf_check -s exit:0 $frank up
    atf_check -s exit:0 -o match:"main ok" $frank status
}

## helpers
# Create ./source-repo: a one-commit git repository used as the clone
# source by the tests.  The identity env vars keep git from failing in
# the sandboxed test environment, which has no global git config.
git_hello() {
    export GIT_AUTHOR_NAME=kyua
    export GIT_AUTHOR_EMAIL=kyua
    export GIT_COMMITTER_NAME=kyua
    export GIT_COMMITTER_EMAIL=kyua

    mkdir source-repo && cd source-repo
    # (Removed a leftover "which git" debug line that polluted stdout.)
    git init -b main
    echo "hello world" > hello
    git add hello
    git commit -m "hello"
    cd ..
}

Added src/gauntlet/.envrc.



>
1
export BSDTESTS=/home/patmaddox/freebsd-releng-13.2/usr/tests

Added src/gauntlet/Kyuafile.









































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
-- $FreeBSD$
--
-- Copyright 2011 Google Inc.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright
--   notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
--   notice, this list of conditions and the following disclaimer in the
--   documentation and/or other materials provided with the distribution.
-- * Neither the name of Google Inc. nor the names of its contributors
--   may be used to endorse or promote products derived from this software
--   without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-- Automatically recurses into any subdirectory that holds a Kyuafile.
-- As such, this Kyuafile is suitable for installation into the root of
-- the tests hierarchy as well as into any other subdirectory that needs
-- "auto-discovery" of tests.
--
-- This file is based on the Kyuafile.top sample file distributed in the
-- kyua-cli package.

syntax(2)

-- Include the Kyuafile of every immediate subdirectory that has one.
-- fs.files() yields raw directory entries, so "." and ".." must be
-- skipped explicitly.
local directory = fs.dirname(current_kyuafile())
for file in fs.files(directory) do
    if file == "." or file == ".." then
        -- Skip these special entries.
    else
        local kyuafile_relative = fs.join(file, "Kyuafile")
        local kyuafile_absolute = fs.join(directory, kyuafile_relative)
        if fs.exists(kyuafile_absolute) then
            include(kyuafile_relative)
        end
    end
end

Added src/gauntlet/freebsd/Justfile.

















































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
# List available recipes.
help:
  just -l

# Generate a Kyuafile and *_ref wrapper tests from the FreeBSD test
# suite for the utility named by the current directory.
# Requires $BSDTESTS to point at the installed FreeBSD tests tree.
[no-cd]
init:
  #!/bin/sh
  set -e
  : ${BSDTESTS:?}
  bin=$(basename $(pwd))
  bindir=$(basename $(dirname $(pwd)))
  # Copy the upstream Kyuafile: drop the "Automatically generated"
  # marker, delete a leading blank line (the sed "2,$b" restricts the
  # delete to line 1), and rebrand the suite as "gauntlet".
  cat $BSDTESTS/$bindir/$bin/Kyuafile |
    grep -v -- '-- Automatically generated' |
    sed -e '2,$b' -e '/^$/d' |
    sed -e 's/^test_suite("FreeBSD")/test_suite("gauntlet")/' \
    > Kyuafile
  tests=$(grep -E -o 'atf_test_program\{name="([[:alpha:]]|_)*"' Kyuafile | sed -E -e 's/.*"(.*)".*/\1/')
  for t in $tests; do
    cat > ${t}_ref <<EOF
  #! /usr/bin/env atf-sh
  set -e

  . \${BSDTESTS}/$bindir/$bin/$t
  EOF
    chmod +x ${t}_ref
    sed -i '' -e "s/atf_test_program{name=\"$t\"/atf_test_program{name=\"${t}_ref\"/" Kyuafile
  done

# Register <test>_<name> variants in the Kyuafile and generate wrapper
# scripts that run the reference tests with <name>/_build first in PATH.
[no-cd]
impl name:
  #!/bin/sh
  set -e
  name="{{name}}"
  grep -E 'atf_test_program\{name="([[:alpha:]]|_)*_ref"' Kyuafile  | sed -E "s/(.*\{name=\")(([[:alpha:]]|_)*)_ref\"(.*)/\1\2_${name}\"\4/" >> Kyuafile

  for f in *_ref; do
    newname=$(echo $f | sed -E -e "s/(.*)_ref$/\1_$name/")
    cat $f | sed -e '0,$b' -e "/^$/a\\
  PATH=\$(realpath \$(atf_get_srcdir))/$name/_build:\$PATH" > $newname
  chmod +x $newname
  done

Added src/gauntlet/freebsd/Kyuafile.









































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
-- $FreeBSD$
--
-- Copyright 2011 Google Inc.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright
--   notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
--   notice, this list of conditions and the following disclaimer in the
--   documentation and/or other materials provided with the distribution.
-- * Neither the name of Google Inc. nor the names of its contributors
--   may be used to endorse or promote products derived from this software
--   without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-- Automatically recurses into any subdirectory that holds a Kyuafile.
-- As such, this Kyuafile is suitable for installation into the root of
-- the tests hierarchy as well as into any other subdirectory that needs
-- "auto-discovery" of tests.
--
-- This file is based on the Kyuafile.top sample file distributed in the
-- kyua-cli package.

syntax(2)

-- Include the Kyuafile of every immediate subdirectory that has one.
-- fs.files() yields raw directory entries, so "." and ".." must be
-- skipped explicitly.
local directory = fs.dirname(current_kyuafile())
for file in fs.files(directory) do
    if file == "." or file == ".." then
        -- Skip these special entries.
    else
        local kyuafile_relative = fs.join(file, "Kyuafile")
        local kyuafile_absolute = fs.join(directory, kyuafile_relative)
        if fs.exists(kyuafile_absolute) then
            include(kyuafile_relative)
        end
    end
end

Added src/gauntlet/freebsd/bin/Kyuafile.









































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
-- $FreeBSD$
--
-- Copyright 2011 Google Inc.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright
--   notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
--   notice, this list of conditions and the following disclaimer in the
--   documentation and/or other materials provided with the distribution.
-- * Neither the name of Google Inc. nor the names of its contributors
--   may be used to endorse or promote products derived from this software
--   without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-- Automatically recurses into any subdirectory that holds a Kyuafile.
-- As such, this Kyuafile is suitable for installation into the root of
-- the tests hierarchy as well as into any other subdirectory that needs
-- "auto-discovery" of tests.
--
-- This file is based on the Kyuafile.top sample file distributed in the
-- kyua-cli package.

syntax(2)

-- Include the Kyuafile of every immediate subdirectory that has one.
-- fs.files() yields raw directory entries, so "." and ".." must be
-- skipped explicitly.
local directory = fs.dirname(current_kyuafile())
for file in fs.files(directory) do
    if file == "." or file == ".." then
        -- Skip these special entries.
    else
        local kyuafile_relative = fs.join(file, "Kyuafile")
        local kyuafile_absolute = fs.join(directory, kyuafile_relative)
        if fs.exists(kyuafile_absolute) then
            include(kyuafile_relative)
        end
    end
end

Added src/gauntlet/freebsd/bin/cp/Kyuafile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
-- Automatically generated by mk_tests.sh.
-- (Hand edits will be lost when mk_kyua.sh regenerates this file.)

syntax(2)

test_suite("gauntlet")

atf_test_program{name="test_cp_test_ref", }
atf_test_program{name="test_cp_test_elixir", }

Added src/gauntlet/freebsd/bin/cp/Makefile.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
# Build the cp implementations (currently elixir; rust/go planned) and
# fetch the "sparse" helper binary that the upstream cp tests invoke.
ROOT=		../..
IMPLS=		elixir # rust go

all: sparse

# Copy the prebuilt sparse-file checker out of the FreeBSD tests tree.
sparse: ${BSDTESTS}/bin/cp/sparse
	cp ${.ALLSRC} ${.TARGET}

.include "${ROOT}/kyua.mk"

Added src/gauntlet/freebsd/bin/cp/elixir/Makefile.







>
>
>
1
2
3
# Install cp.exs as the runnable "cp" for this implementation
# ("!": always rebuild, regardless of timestamps).
_build/cp! cp.exs
	@mkdir -p ${.TARGET:H}
	cp ${.ALLSRC} ${.TARGET}

Added src/gauntlet/freebsd/bin/cp/elixir/cp.exs.







>
>
>
1
2
3
#!/usr/bin/env elixir
# Minimal cp(1) stand-in: copy a single file.  Only the two-argument
# "cp <from> <to>" form is supported.
[from, to] = System.argv()

# File.cp!/2 raises on failure, so the script exits non-zero like cp(1).
# The previous File.cp/2 returned an error tuple that was silently
# discarded, making every failed copy look successful.
File.cp!(from, to)

Added src/gauntlet/freebsd/bin/cp/test_cp_test_elixir.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
#! /usr/bin/env atf-sh
# automatically generated by mk_tests.sh
set -e

export LC_ALL=C.UTF-8 # otherwise VM complains about incorrect locale
# Put the elixir implementation first in PATH, then run the reference tests.
PATH=$(realpath $(atf_get_srcdir))/elixir/_build:$PATH
. $(atf_get_srcdir)/test_cp_test_ref

Added src/gauntlet/freebsd/bin/cp/test_cp_test_ref.

























































































































































































































































































































































































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
#! /usr/libexec/atf-sh
#
# SPDX-License-Identifier: BSD-2-Clause-FreeBSD
#
# Copyright (c) 2020 Kyle Evans <kevans@FreeBSD.org>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# $FreeBSD$

# Assert via stat(1) that file $1 is exactly $2 bytes long.
check_size()
{
	file=$1
	sz=$2

	atf_check -o inline:"$sz\n" stat -f '%z' $file
}

atf_test_case basic
basic_body()
{
	echo "foo" > bar

	atf_check cp bar baz
	check_size baz 4
}

atf_test_case basic_symlink
basic_symlink_body()
{
	echo "foo" > bar
	ln -s bar baz

	atf_check cp baz foo
	atf_check test '!' -L foo

	atf_check -e inline:"cp: baz and baz are identical (not copied).\n" \
	    -s exit:1 cp baz baz
	atf_check -e inline:"cp: bar and baz are identical (not copied).\n" \
	    -s exit:1 cp baz bar
}

atf_test_case chrdev
chrdev_body()
{
	echo "foo" > bar

	check_size bar 4
	atf_check cp /dev/null trunc
	check_size trunc 0
	atf_check cp bar trunc
	check_size trunc 4
	atf_check cp /dev/null trunc
	check_size trunc 0
}

atf_test_case matching_srctgt
matching_srctgt_body()
{

	# PR235438: `cp -R foo foo` would previously infinitely recurse and
	# eventually error out.
	mkdir foo
	echo "qux" > foo/bar
	cp foo/bar foo/zoo

	atf_check cp -R foo foo
	atf_check -o inline:"qux\n" cat foo/foo/bar
	atf_check -o inline:"qux\n" cat foo/foo/zoo
	atf_check -e not-empty -s not-exit:0 stat foo/foo/foo
}

atf_test_case matching_srctgt_contained
matching_srctgt_contained_body()
{

	# Let's do the same thing, except we'll try to recursively copy foo into
	# one of its subdirectories.
	mkdir foo
	ln -s foo coo
	echo "qux" > foo/bar
	mkdir foo/moo
	touch foo/moo/roo
	cp foo/bar foo/zoo

	atf_check cp -R foo foo/moo
	atf_check cp -RH coo foo/moo
	atf_check -o inline:"qux\n" cat foo/moo/foo/bar
	atf_check -o inline:"qux\n" cat foo/moo/coo/bar
	atf_check -o inline:"qux\n" cat foo/moo/foo/zoo
	atf_check -o inline:"qux\n" cat foo/moo/coo/zoo

	# We should have copied the contents of foo/moo before foo, coo started
	# getting copied in.
	atf_check -o not-empty stat foo/moo/foo/moo/roo
	atf_check -o not-empty stat foo/moo/coo/moo/roo
	atf_check -e not-empty -s not-exit:0 stat foo/moo/foo/moo/foo
	atf_check -e not-empty -s not-exit:0 stat foo/moo/coo/moo/coo
}

atf_test_case matching_srctgt_link
matching_srctgt_link_body()
{

	mkdir foo
	echo "qux" > foo/bar
	cp foo/bar foo/zoo

	atf_check ln -s foo roo
	atf_check cp -RH roo foo
	atf_check -o inline:"qux\n" cat foo/roo/bar
	atf_check -o inline:"qux\n" cat foo/roo/zoo
}

atf_test_case matching_srctgt_nonexistent
matching_srctgt_nonexistent_body()
{

	# We'll copy foo to a nonexistent subdirectory; ideally, we would
	# skip just the directory and end up with a layout like;
	#
	# foo/
	#     bar
	#     dne/
	#         bar
	#         zoo
	#     zoo
	#
	mkdir foo
	echo "qux" > foo/bar
	cp foo/bar foo/zoo

	atf_check cp -R foo foo/dne
	atf_check -o inline:"qux\n" cat foo/dne/bar
	atf_check -o inline:"qux\n" cat foo/dne/zoo
	atf_check -e not-empty -s not-exit:0 stat foo/dne/foo
}

recursive_link_setup()
{
	extra_cpflag=$1

	mkdir -p foo/bar
	ln -s bar foo/baz

	mkdir foo-mirror
	eval "cp -R $extra_cpflag foo foo-mirror"
}

atf_test_case recursive_link_dflt
recursive_link_dflt_body()
{
	recursive_link_setup

	# -P is the default, so this should work and preserve the link.
	atf_check cp -R foo foo-mirror
	atf_check test -L foo-mirror/foo/baz
}

atf_test_case recursive_link_Hflag
recursive_link_Hflag_body()
{
	recursive_link_setup

	# -H will not follow either, so this should also work and preserve the
	# link.
	atf_check cp -RH foo foo-mirror
	atf_check test -L foo-mirror/foo/baz
}

atf_test_case recursive_link_Lflag
recursive_link_Lflag_body()
{
	recursive_link_setup -L

	# -L will work, but foo/baz ends up expanded to a directory.
	atf_check test -d foo-mirror/foo/baz -a \
	    '(' ! -L foo-mirror/foo/baz ')'
	atf_check cp -RL foo foo-mirror
	atf_check test -d foo-mirror/foo/baz -a \
	    '(' ! -L foo-mirror/foo/baz ')'
}

# Assert that the "sparse" helper binary (installed next to this test
# script; ${0%/*} is the script's directory) judges $1 to be sparse.
file_is_sparse()
{
	atf_check ${0%/*}/sparse "$1"
}

# Assert that $1 and $2 have identical contents but are distinct files:
# the first check requires their device/inode pairs to DIFFER (i.e. the
# copy must not be a hardlink of the original), the second compares data.
files_are_equal()
{
	atf_check test "$(stat -f "%d %i" "$1")" != "$(stat -f "%d %i" "$2")"
	atf_check cmp "$1" "$2"
}

atf_test_case sparse_leading_hole
sparse_leading_hole_body()
{
	# A 16-megabyte hole followed by one megabyte of data
	truncate -s 16M foo
	seq -f%015g 65536 >>foo
	file_is_sparse foo

	atf_check cp foo bar
	files_are_equal foo bar
	file_is_sparse bar
}

atf_test_case sparse_multiple_holes
sparse_multiple_holes_body()
{
	# Three one-megabyte blocks of data preceded, separated, and
	# followed by 16-megabyte holes
	truncate -s 16M foo
	seq -f%015g 65536 >>foo
	truncate -s 33M foo
	seq -f%015g 65536 >>foo
	truncate -s 50M foo
	seq -f%015g 65536 >>foo
	truncate -s 67M foo
	file_is_sparse foo

	atf_check cp foo bar
	files_are_equal foo bar
	file_is_sparse bar
}

atf_test_case sparse_only_hole
sparse_only_hole_body()
{
	# A 16-megabyte hole
	truncate -s 16M foo
	file_is_sparse foo

	atf_check cp foo bar
	files_are_equal foo bar
	file_is_sparse bar
}

atf_test_case sparse_to_dev
sparse_to_dev_body()
{
	# Three one-megabyte blocks of data preceded, separated, and
	# followed by 16-megabyte holes
	truncate -s 16M foo
	seq -f%015g 65536 >>foo
	truncate -s 33M foo
	seq -f%015g 65536 >>foo
	truncate -s 50M foo
	seq -f%015g 65536 >>foo
	truncate -s 67M foo
	file_is_sparse foo

	atf_check -o file:foo cp foo /dev/stdout
}

atf_test_case sparse_trailing_hole
sparse_trailing_hole_body()
{
	# One megabyte of data followed by a 16-megabyte hole
	seq -f%015g 65536 >foo
	truncate -s 17M foo
	file_is_sparse foo

	atf_check cp foo bar
	files_are_equal foo bar
	file_is_sparse bar
}

atf_test_case standalone_Pflag
standalone_Pflag_body()
{
	echo "foo" > bar
	ln -s bar foo

	atf_check cp -P foo baz
	atf_check -o inline:'Symbolic Link\n' stat -f %SHT baz
}

atf_init_test_cases()
{
	atf_add_test_case basic
	atf_add_test_case basic_symlink
	atf_add_test_case chrdev
	atf_add_test_case matching_srctgt
	atf_add_test_case matching_srctgt_contained
	atf_add_test_case matching_srctgt_link
	atf_add_test_case matching_srctgt_nonexistent
	atf_add_test_case recursive_link_dflt
	atf_add_test_case recursive_link_Hflag
	atf_add_test_case recursive_link_Lflag
	atf_add_test_case sparse_leading_hole
	atf_add_test_case sparse_multiple_holes
	atf_add_test_case sparse_only_hole
	atf_add_test_case sparse_to_dev
	atf_add_test_case sparse_trailing_hole
	atf_add_test_case standalone_Pflag
}

Added src/gauntlet/freebsd/kyua.mk.





























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
# kyua.mk - shared build glue for gauntlet test directories.
# The including Makefile must set ROOT (relative path to the gauntlet
# freebsd root) and IMPLS (list of alternate implementations).
# BINNAME/BINDIR are derived from the directory layout, e.g. bin/cp.
BINNAME=	${.CURDIR:T}
BINDIR=		${.CURDIR:H:T}
FULLBIN=	${BINDIR}/${BINNAME}
MAKEFILES!=	ls */Makefile

.MAIN: all

.PHONY: all clean

all:
clean:

# Regenerate the Kyuafile and test wrappers from scratch
# ("!" targets always run, regardless of timestamps).
Kyuafile!
	rm -f test_*
	${ROOT}/mk_kyua.sh ${FULLBIN} "${IMPLS}"

# Build every implementation subdirectory that has a Makefile.
.for f in ${MAKEFILES}
all: ${f}
${f}!
	${MAKE} -C ${f:H}
.endfor

# clean resets each impl's _build to a stub that always exits false, so
# an un-built implementation shows up as test failures rather than
# "command not found".
.for i in ${IMPLS}
clean: clean-${i}
clean-${i}:
	rm -rf ${i}/_build
	mkdir ${i}/_build
	printf "#!/bin/sh\nfalse" > ${i}/_build/${BINNAME}
	chmod +x ${i}/_build/${BINNAME}
.endfor

Added src/gauntlet/freebsd/mk_kyua.sh.























































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
#!/bin/sh
# mk_kyua.sh - generate the Kyuafile and test wrappers for one FreeBSD
# utility.  $1 = path under $BSDTESTS (e.g. bin/cp); $2 = space-separated
# list of alternate implementations (e.g. "elixir rust").
set -e

: ${BSDTESTS:?}
fullbin=$1
impls=$2

# Copy the upstream Kyuafile, rewriting its provenance comment and
# rebranding the test suite from "FreeBSD" to "gauntlet".
# (The comment line inside the pipeline is legal: after a trailing "|"
# the shell skips comments before reading the next command.)
cat $BSDTESTS/$fullbin/Kyuafile |
    sed -e 's/generated by bsd.test.mk/generated by mk_tests.sh/' |
#    sed -e '2,$b' -e '/^$/d' |
    sed -e 's/^test_suite("FreeBSD")/test_suite("gauntlet")/' \
	> Kyuafile

# Rename every registered test program to test_<name>_ref in the
# Kyuafile and snapshot the upstream test script under that name.
tests=$(grep -E -o 'atf_test_program\{name="([[:alpha:]]|_)*"' Kyuafile | sed -E -e 's/.*"(.*)".*/\1/')
for t in $tests; do
    testfile=test_${t}_ref
    cat $BSDTESTS/$fullbin/$t > $testfile
    chmod +x $testfile
    sed -i '' -e "s/atf_test_program{name=\"$t\"/atf_test_program{name=\"$testfile\"/" Kyuafile
done

for i in $impls; do
    grep -E 'atf_test_program\{name="([[:alpha:]]|_)*_ref"' Kyuafile | sed -E "s/(.*\{name=\")(([[:alpha:]]|_)*)_ref\"(.*)/\1\2_${i}\"\4/" >> Kyuafile

    for f in *_ref; do
	newname=$(echo $f | sed -E -e "s/(.*)_ref$/\1_$i/")
	if [ "$i" = "elixir" ]; then
	    lcline='export LC_ALL=C.UTF-8 # otherwise VM complains about incorrect locale'
	fi
	cat > $newname <<EOF
#! /usr/bin/env atf-sh
# automatically generated by mk_tests.sh
set -e

$lcline
PATH=\$(realpath \$(atf_get_srcdir))/$i/_build:\$PATH
. \$(atf_get_srcdir)/$f
EOF
#	cat $f | sed -e '0,$b' -e "/^$/a\\
#PATH=\$(realpath \$(atf_get_srcdir))/$i/_build:\$PATH" > $newname
	chmod +x $newname
    done
done

Added src/gauntlet/games/Kyuafile.









































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
-- $FreeBSD$
--
-- Copyright 2011 Google Inc.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright
--   notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
--   notice, this list of conditions and the following disclaimer in the
--   documentation and/or other materials provided with the distribution.
-- * Neither the name of Google Inc. nor the names of its contributors
--   may be used to endorse or promote products derived from this software
--   without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-- Automatically recurses into any subdirectory that holds a Kyuafile.
-- As such, this Kyuafile is suitable for installation into the root of
-- the tests hierarchy as well as into any other subdirectory that needs
-- "auto-discovery" of tests.
--
-- This file is based on the Kyuafile.top sample file distributed in the
-- kyua-cli package.

syntax(2)

-- Include the Kyuafile of every immediate subdirectory that has one.
-- fs.files() yields raw directory entries, so "." and ".." must be
-- skipped explicitly.
local directory = fs.dirname(current_kyuafile())
for file in fs.files(directory) do
    if file == "." or file == ".." then
        -- Skip these special entries.
    else
        local kyuafile_relative = fs.join(file, "Kyuafile")
        local kyuafile_absolute = fs.join(directory, kyuafile_relative)
        if fs.exists(kyuafile_absolute) then
            include(kyuafile_relative)
        end
    end
end

Added src/gauntlet/games/simpleguess/Kyuafile.













>
>
>
>
>
>
1
2
3
4
5
6
-- Kyuafile for the simpleguess game: one wrapper test program per
-- implementation, each sourcing the shared test cases.
syntax(2)

test_suite("gauntlet")

atf_test_program{name="test_simpleguess_c"}
atf_test_program{name="test_simpleguess_elixir"}

Added src/gauntlet/games/simpleguess/c/Makefile.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
.PHONY: run test

# Build the C implementation into _build/.
_build/simpleguess: simpleguess.c
	@mkdir -p ${.TARGET:H}
	cc -o ${.TARGET} -Wall -Werror ${.ALLSRC}

# Interactive run.  NOTE(review): the binary reads the expected answer
# from argv[1], which this target does not pass -- confirm intended use.
run: _build/simpleguess
	./${.ALLSRC}

Added src/gauntlet/games/simpleguess/c/simpleguess.c.































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
#include <stdio.h>
#include <string.h>

/*
 * simpleguess: prompt for a one-character guess on stdin and compare it
 * with the expected answer passed as argv[1].  Prints "right!" or
 * "wrong :(" and exits 0; exits 2 on usage/input errors.
 */
int main(int argc, char **argv)
{
  char guess[2];

  /* Guard against a missing argument: strcmp(guess, NULL) is undefined
   * behavior and crashed when run without an answer. */
  if (argc < 2) {
    fprintf(stderr, "usage: simpleguess <answer>\n");
    return 2;
  }

  printf("Guess your number: ");
  /* %1s reads one non-whitespace character plus the NUL terminator,
   * exactly filling guess[2]; check for EOF/failed input. */
  if (scanf("%1s", guess) != 1) {
    fprintf(stderr, "simpleguess: no input\n");
    return 2;
  }

  if (strcmp(guess, argv[1]) == 0) {
    printf("right!\n");
  } else {
    printf("wrong :(\n");
  }
  return 0;
}

Added src/gauntlet/games/simpleguess/elixir/Makefile.







>
>
>
1
2
3
# Install simpleguess.exs as the runnable "simpleguess"
# ("!": always rebuild, regardless of timestamps).
_build/simpleguess! simpleguess.exs
	@mkdir -p ${.TARGET:H}
	cp ${.ALLSRC} ${.TARGET}

Added src/gauntlet/games/simpleguess/elixir/simpleguess.exs.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
#!/usr/bin/env elixir
# simpleguess: prompt for a guess on stdin and compare it with the
# expected answer given as the first command-line argument.
[correct | _] = System.argv()
# String.trim/1 removes the trailing newline that IO.gets/1 returns.
guess = IO.gets("Guess your number: ") |> String.trim()
if guess == correct do
  IO.puts("right!")
else
  IO.puts("wrong :(")
end

Added src/gauntlet/games/simpleguess/test_simpleguess_c.











>
>
>
>
>
1
2
3
4
5
#! /usr/libexec/atf-sh
# Run the shared simpleguess tests against the C implementation by
# putting its _build directory first in PATH.
set -e

PATH=$(realpath $(atf_get_srcdir))/c/_build:$PATH
. $(atf_get_srcdir)/test_simpleguess_shared

Added src/gauntlet/games/simpleguess/test_simpleguess_elixir.













>
>
>
>
>
>
1
2
3
4
5
6
#! /usr/libexec/atf-sh
# Run the shared simpleguess tests against the elixir implementation by
# putting its _build directory first in PATH.
set -e

export LC_ALL=C.UTF-8 # otherwise VM complains about incorrect locale
PATH=$(realpath $(atf_get_srcdir))/elixir/_build:$PATH
. $(atf_get_srcdir)/test_simpleguess_shared

Added src/gauntlet/games/simpleguess/test_simpleguess_shared.

















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
# Shared ATF test cases for the simpleguess implementations.  Sourced by the
# per-language wrappers (test_simpleguess_c, test_simpleguess_elixir), which
# first prepend the implementation's _build directory to PATH so that plain
# "simpleguess" resolves to the binary under test.
atf_init_test_cases()
{
    atf_add_test_case 1_right
    atf_add_test_case 2_wrong
    # wrong then right
}

atf_test_case 1_right
1_right_body()
{
    # Feed the correct guess (3) on stdin and expect the "right!" response.
    cat > input <<EOF
3
EOF
    atf_check -o inline:"Guess your number: right!\n" simpleguess 3 < input
}

atf_test_case 2_wrong
2_wrong_body()
{
    # Feed an incorrect guess (1 vs. answer 3) and expect "wrong :(".
    cat > input <<EOF
1
EOF
    atf_check -o inline:"Guess your number: wrong :(\n" simpleguess 3 < input
}

Added src/infra/gulliver/Makefile.































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
# Build a FreeBSD pkgbase boot environment image:
# check out freebsd source code
# make buildworld buildkernel
# make packages
# create a ramdisk (or file)
# create a zpool from the file
# mount the zpool to a dir
# snapshot zpool
# cat snapshot to disk
# manually import snapshot as be

NAME=		beastie
BRANCH?=	main
REPODIR?=	/packages/${BRANCH}
GITSRC=		${.CURDIR}/freebsd-src.git/b/${BRANCH}
GITREPO=	https://github.com/freebsd/freebsd-src.git
BUILDDIR=	${.CURDIR}/_build
MAKESRC=	MAKELEVEL=0 make -C ${GITSRC} -j $$(sysctl -n hw.ncpu)
ZPOOL=		${NAME}-pkgbase
ROOT=		/${ZPOOL}
ABI=		FreeBSD:15:amd64
PKG=		ABI=${ABI} IGNORE_OSVERSION=yes pkg -r ${ROOT}
PKGREPO=	freebsd
BASEREPO=	base_latest
PACKAGES=	pkg ca_root_nss
FILES=		${.CURDIR}/files
BOOTENV=	${BUILDDIR}/${NAME}.be
# BUG FIX: ${INSTALL} was used below but never defined; plain sys.mk does
# not provide it (only bsd.own.mk does), so the install(1) invocations
# expanded to just their arguments and failed.  "?=" keeps it overridable.
INSTALL?=	install

.PHONY: be checkzpool packages zpool build fetch clean import
.MAIN: ${BOOTENV}

# Assemble the boot environment: recreate the backing zpool from a sparse
# file, install pkgbase packages into it, lay down config files, snapshot,
# then hand off to scripts/make-be to serialize the snapshot.
${BOOTENV}:
	@mkdir -p ${BUILDDIR}
	if zpool status ${ZPOOL} > /dev/null 2>&1; then zpool destroy ${ZPOOL}; fi
	rm -f ${BUILDDIR}/${ZPOOL}.zpool
	truncate -s 10GB ${BUILDDIR}/${ZPOOL}.zpool && zpool create ${ZPOOL} ${BUILDDIR}/${ZPOOL}.zpool
	mkdir -p ${ROOT}/usr/share/keys
	rsync -a /usr/share/keys/pkg ${ROOT}/usr/share/keys
	${PKG} install -y -r ${PKGREPO} pkg ca_root_nss
	${PKG} install -y -r ${BASEREPO} $$(${PKG} search -r ${BASEREPO} -q FreeBSD- 2>/dev/null | grep -v '\-dbg\-15.' | grep -v '\-lib32\-15.' | grep -v '\-src\-15.')
	${INSTALL} -o 0 -g 0 -m 0644 ${FILES}/etc/rc.conf ${ROOT}/etc/rc.conf
	${INSTALL} -o 0 -g 0 -m 0644 ${FILES}/boot/loader.conf ${ROOT}/boot/loader.conf
	certctl -D ${ROOT} rehash
	zfs snapshot ${ZPOOL}@init

	${PKG} update -r ${BASEREPO}
	ABI=${ABI} BASEREPO=${BASEREPO} BOOTENV=${BOOTENV} BUILDDIR=${BUILDDIR} ZPOOL=${ZPOOL} ${.CURDIR}/scripts/make-be

packages:
	REPODIR=${REPODIR} ${MAKESRC} packages

build:
	${MAKESRC} buildworld buildkernel

fetch:
	if [ ! -d ${GITSRC} ]; then git clone ${GITREPO} ${GITSRC}; fi

# Import the generated .be stream as a bectl boot environment named after
# the file (minus its .be suffix).
import:
	export bepath=$$(realpath ${BOOTENV}) && \
	export bename=$$(echo $${bepath} | xargs basename | sed 's/\.be$$//') && \
	bectl import $${bename} < $${bepath}

clean:
	rm -rf ${BUILDDIR}

Added src/infra/gulliver/files/boot/loader.conf.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
aesni_load="YES"
geom_eli_load="YES"
security.bsd.allow_destructive_dtrace=0
kern.geom.label.disk_ident.enable="0"
kern.geom.label.gptid.enable="0"
cryptodev_load="YES"
zfs_load="YES"
#hw.usb.usbhid.enable="1"

Added src/infra/gulliver/files/etc/rc.conf.























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
clear_tmp_enable="YES"
syslogd_flags="-ss"
#sendmail_enable="NONE" # this disables all mail, including periodic notices
hostname="beastie-current"
ifconfig_em0="SYNCDHCP"
#sshd_enable="YES"
ntpdate_enable="YES"
ntpd_enable="YES"
dumpdev="AUTO"
zfs_enable="YES"
#cloned_interfaces="bridge0 bridge1"
#ifconfig_bridge0_name="jails"
#ifconfig_jails="inet 192.168.2.1/24 up"
#ifconfig_bridge1_name="bhyves"
#ifconfig_bhyves="inet 192.168.3.1/24 up"
#pf_enable="YES"
#gateway_enable="YES"
ip6addrctl_enable="NO"
#nginx_enable="YES"
#kld_list="nvidia nvidia-modeset"
#dbus_enable="YES"
#moused_enable="YES"
keymap="us.ctrl"
#webcamd_enable="NO"
#tailscaled_enable="YES"
#postgresql_enable="YES"
#pcscd_enable="YES"

Added src/infra/gulliver/scripts/make-be.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#!/bin/sh
# make-be: serialize the pkgbase zpool's @init snapshot into a versioned
# .be file and point ${BOOTENV} at it via a symlink.
#
# Required environment (the ":?" expansions abort with an error if unset):
#   ABI, BASEREPO, BOOTENV, BUILDDIR, ZPOOL
set -e

abi=${ABI:?}
baserepo=${BASEREPO:?}
bootenv=${BOOTENV:?}
builddir=${BUILDDIR:?}
zpool=${ZPOOL:?}

main()
{
    # Derive a file name from the FreeBSD-runtime package version, e.g.
    # "FreeBSD-runtime-15.snap123" -> "FreeBSD-15-snap123".
    snap=$(pkg search -q -r "${baserepo}" 'FreeBSD-runtime-15' | sed -e 's/-runtime-/-/' -e 's/\./-/')

    # BUG FIX: the original wrote "$${snap}", which the shell expands to
    # the PID ($$) followed by the literal "{snap}", so the stream landed
    # in a file like "12345{snap}.be" and the symlink below dangled.
    zfs send "${zpool}@init" > "${builddir}/${snap}.be"
    ln -f -s "${builddir}/${snap}.be" "${bootenv}"
}

main "${@}"

Added src/infra/p7x.sh/Makefile.





























>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
.PHONY: all cert test clean

CERTFILES= ${HOME}/Downloads/www_p7x_sh.pem ${HOME}/Downloads/www_p7x_sh.key

all: cert test clean

cert: ${CERTFILES}
	cat ${.ALLSRC} | ssh p7x.sh "cat > /home/patmaddox/certs/p7x.sh.pem && doas service haproxy reload"

test:
	fetch -q -o - https://p7x.sh

clean:
	rm -f ${CERTFILES}

Added src/inventory/Makefile.

































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
IMPL?=		crystal
IMPL_DIR=	${.CURDIR}/inventory-${IMPL}
PATH:=		${IMPL_DIR}/_build:${PATH}
.export PATH

.PHONY: test build

build:
	cd ${IMPL_DIR} && make

test: build _build/inventory_test
	./_build/inventory_test

_build/inventory_test: inventory_test.sh
	@mkdir -p ${.TARGET:H}
	shtk build -o ${.TARGET} -m shtk_unittest_main inventory_test.sh

Added src/inventory/inventory-crystal/.editorconfig.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
root = true

[*.cr]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true

Added src/inventory/inventory-crystal/.gitignore.











>
>
>
>
>
1
2
3
4
5
/docs/
/lib/
/bin/
/.shards/
*.dwarf

Added src/inventory/inventory-crystal/LICENSE.











































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
MIT License

Copyright (c) 2024 Pat Maddox <pat@patmaddox.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

Added src/inventory/inventory-crystal/Makefile.













>
>
>
>
>
>
1
2
3
4
5
6
.PHONY: build
CR_FILES!= find src -name '*.cr'

_build/inventory: ${CR_FILES}
	@mkdir -p _build
	crystal build -o _build/inventory src/inventory.cr

Added src/inventory/inventory-crystal/README.md.























































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
# inventory

TODO: Write a description here

## Installation

TODO: Write installation instructions here

## Usage

TODO: Write usage instructions here

## Development

TODO: Write development instructions here

## Contributing

1. Fork it (<https://github.com/your-github-user/inventory-crystal/fork>)
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create a new Pull Request

## Contributors

- [Pat Maddox](https://github.com/your-github-user) - creator and maintainer

Added src/inventory/inventory-crystal/shard.yml.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
name: inventory
version: 0.1.0

authors:
  - Pat Maddox <pat@patmaddox.com>

targets:
  inventory:
    main: src/inventory.cr

crystal: '>= 1.10.1'

license: MIT

Added src/inventory/inventory-crystal/spec/inventory_spec.cr.



















>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
require "./spec_helper"

describe Inventory do
  # TODO: Write tests

  it "works" do
    false.should eq(true)
  end
end

Added src/inventory/inventory-crystal/spec/spec_helper.cr.





>
>
1
2
require "spec"
require "../src/inventory"

Added src/inventory/inventory-crystal/src/inventory.cr.

































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
# Entry point: runner.cr defines Inventory::Runner, which parses ARGV and
# drives the Inventory class below.
require "./runner"

Inventory::Runner.new.run

# A one-item-per-line inventory persisted in a ".inventory" file in the
# current working directory.  The file is read on construction and written
# back only when the in-memory list actually changed.
class Inventory
  @filename = ".inventory"
  @items = [] of String
  # True when @items diverged from the on-disk file.
  @dirty = false

  def initialize(&)
    read
    yield self
    write if @dirty
  end

  # Print all items, one per line; silent when the inventory is empty.
  def show
    puts @items.join("\n") if @items.any?
  end

  # Add item if not already present (idempotent).
  def set(item : String)
    unless @items.includes?(item)
      @items << item
      @dirty = true
    end
  end

  # Remove item; exits with status 1 and an error on stderr if absent.
  def delete(item : String)
    if @items.delete(item)
      @dirty = true
    else
      STDERR.puts "E: no item '#{item}'"
      exit 1
    end
  end

  private def read
    @items = File.read_lines(@filename) if File.exists?(@filename)
  end

  private def write
    if @dirty
      File.open(@filename, "w") do |f|
        @items.each {|i| f.puts(i) }
      end
      @dirty = false
    end
  end
end

Added src/inventory/inventory-crystal/src/runner.cr.























































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
require "option_parser"

class Inventory
  # CLI subcommands understood by the runner.
  enum Command
    Show
    Set
    Delete
    Unknown
  end

  # Parses ARGV into a Command (plus optional item argument) and executes
  # it against the Inventory class.
  class Runner
    @parser : OptionParser
    @command = Command::Unknown
    @item : String|Nil

    def initialize
      @parser = init_parser
      @parser.parse
    end

    # Dispatch the parsed command; unknown commands print usage and exit 1.
    def run
      # Copy @item to a local so the compiler can narrow String|Nil.
      item = @item
      case @command
      in Command::Show
        Inventory.new {|i| i.show }
      in Command::Set
        Inventory.new {|i| i.set(item) } if item
      in Command::Delete
        Inventory.new {|i| i.delete(item) } if item
      in Command::Unknown
        puts @parser
        exit(1)
      end
    end

    private def init_parser
      OptionParser.new do |parser|
        parser.banner = "Usage: inventory [subcommand] [arguments]"

        parser.on("show", "Show the inventory") do
          @command = Command::Show
          parser.banner = "Usage: inventory show"
        end

        parser.on("set", "Set an item in inventory") do
          @command = Command::Set
          parser.banner = "Usage: inventory set [item]"
          # BUG FIX: ARGV[-1] raises IndexError when no item argument is
          # given; [-1]? yields nil so run() skips the action instead of
          # crashing with an unhandled exception.
          @item = ARGV[-1]?
        end

        parser.on("del", "Delete an item in inventory") do
          @command = Command::Delete
          parser.banner = "Usage: inventory del [item]"
          @item = ARGV[-1]?
        end
      end
    end
  end
end

Added src/inventory/inventory_test.sh.



















































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
shtk_import unittest

shtk_unittest_add_test empty
empty_test() {
    assert_command inventory show
    assert_command test ! -f .inventory
}

shtk_unittest_add_test one_item
one_item_test() {
    assert_command inventory set pants
    assert_command -o inline:"pants\n" inventory show
}

shtk_unittest_add_test two_items
two_items_test() {
    assert_command inventory set pants
    assert_command inventory set shirt
    assert_command -o inline:"pants\nshirt\n" inventory show
}

shtk_unittest_add_test re_add
re_add_test() {
    assert_command inventory set pants
    assert_command inventory set shirt
    assert_command inventory set pants
    assert_command -o inline:"pants\nshirt\n" inventory show
}

shtk_unittest_add_test delete
delete_test() {
    assert_command inventory set pants
    assert_command inventory set shirt
    assert_command inventory del pants
    assert_command -o inline:"shirt\n" inventory show
}

shtk_unittest_add_test delete_missing
delete_missing_test() {
    assert_command -s 1 -e inline:"E: no item 'pants'\n" inventory del pants
}

Added src/jail-networking/epair.sh.



























































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
#!/bin/sh
set -e

jail_name="myjail"
host_iface=host_if
jail_iface=${jail_name}_if
host_ip=192.168.4.1
jail_ip=192.168.4.2
netmask=24

usage() {
    echo "Usage:"
    echo "epair.sh start"
    echo "epair.sh stop"
    echo "epair.sh test"
    echo "epair.sh pf # display current pf nat and rules"
}

start() {
    start_ifconfig
    start_jail
    start_pf
}

stop() {
    stop_pf
    stop_jail
    stop_ifconfig
}

start_ifconfig() {
    iface_a=$(ifconfig epair create up)
    iface_b=$(echo $iface_a | sed 's/a$/b/')
    ifconfig $iface_a name $host_iface > /dev/null
    ifconfig $iface_b name $jail_iface > /dev/null

    ifconfig $host_iface inet ${host_ip}/${netmask}
}

stop_ifconfig() {
    ifconfig $host_iface destroy
}

start_jail() {
    jail -c \
	 name=$jail_name \
	 persist \
	 vnet \
	 vnet.interface=$jail_iface

    jexec $jail_name ifconfig lo0 up
    jexec $jail_name ifconfig $jail_iface inet ${jail_ip}/${netmask}
    jexec $jail_name route add default $host_ip > /dev/null
}

stop_jail() {
    jail -r $jail_name
}

start_pf() {
    pfctl -f pf.conf
}

stop_pf() {
    pfctl -f /etc/pf.conf
}

pf() {
    pfctl -s nat
    pfctl -s rules
}

test() {
    echo "=== host: $host_ip"
    echo "=== jail: $jail_ip"

    ping="ping -q -W 1 -c 1"

    echo "=== ping host to self:"
    $ping $host_ip
    echo

    echo "=== ping host to jail:"
    $ping $jail_ip
    echo

    echo "=== ping jail to self:"
    jexec $jail_name $ping $jail_ip
    echo

    echo "=== ping jail to host"
    jexec $jail_name $ping $host_ip
    echo

    gateway=$(route -n get default | grep gateway | awk '{print $2}')
    echo "=== gateway: $gateway"
    echo "=== ping jail to gateway:"
    jexec $jail_name $ping $gateway
    echo

    echo "=== ping jail to 8.8.8.8:"
    jexec $jail_name $ping 8.8.8.8
    echo
    
    echo "OK"
}

# Dispatch to the requested subcommand, defaulting to usage.
# BUG FIX: the original ran "$1" unchecked, so any word on the command
# line was executed as a shell command; validate against the known
# subcommands first.
command=${1:-usage}
case "${command}" in
    start|stop|test|pf|usage)
	$command
	;;
    *)
	usage
	exit 1
	;;
esac

Added src/jail-networking/pf.conf.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
ext_if = "em0"
jail_net = "192.168.4.0/24"

set skip on lo
scrub in

nat log (all) on $ext_if from $jail_net -> ($ext_if:0)

pass out
pass in

pass log (all) from $jail_net
pass log (all) to $jail_net

Added src/jail-proxy/README.md.







































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
# FreeBSD Jail Proxy

Inspired by FreeBSD forum thread: https://forums.freebsd.org/threads/isolating-host-machine-sending-traffic-through-a-jail-proxy.90070/

Setup:

- host machine
- one proxy jail
- other jails connect to internet through proxy jail

Config:

- host machine NAT to external network
- bridge interface on host
- jails set defaultgateway to proxy

## Issues

With this setup and pf.conf, packets going from jail -> proxy -> host don't get NAT.

This is evident by logging packets:

```
 00:00:07.022015 rule 2/0(match): pass in on proxy-bridge: 192.168.4.1 > 1.1.1.1: ICMP echo request, id 64483, seq 0, length 64
 00:00:00.000022 rule 0/0(match): nat out on em0: 192.168.1.126 > 1.1.1.1: ICMP echo request, id 42665, seq 0, length 64
 00:00:00.014273 rule 0/0(match): nat in on em0: 1.1.1.1 > 192.168.4.1: ICMP echo reply, id 64483, seq 0, length 64
 00:00:00.000003 rule 2/0(match): pass out on proxy-bridge: 1.1.1.1 > 192.168.4.1: ICMP echo reply, id 64483, seq 0, length 64

 00:00:12.705671 rule 2/0(match): pass in on epair11a: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000013 rule 2/0(match): pass in on proxy-bridge: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000007 rule 2/0(match): pass out on proxy-bridge: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000003 rule 2/0(match): pass out on epair10a: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000030 rule 2/0(match): pass in on proxy-bridge: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
 00:00:00.000004 rule 2/0(match): pass out on em0: 192.168.4.12 > 1.1.1.1: ICMP echo request, id 8174, seq 0, length 64
```

Added src/jail-proxy/jail.sh.







































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
#!/bin/sh
set -e

if ! ifconfig proxy-bridge > /dev/null 2>&1; then
    ifconfig bridge create name proxy-bridge up
fi

if ! ifconfig epair254a > /dev/null 2>&1; then
    ifconfig epair254 create up
    ifconfig epair254a inet 192.168.4.254/24
    ifconfig proxy-bridge addm epair254b
    ifconfig epair254b up
fi

if ! ifconfig epair10a > /dev/null 2>&1; then
    ifconfig epair10 create up
    ifconfig proxy-bridge addm epair10a
fi

jail -c \
     name=proxy \
     persist \
     vnet \
     vnet.interface=epair10b

jexec proxy ifconfig lo0 up
jexec proxy ifconfig epair10b inet 192.168.4.1/24 up
jexec proxy route add default 192.168.4.254 # host
jexec proxy sysctl net.inet.ip.forwarding=1

#if ! ifconfig epair11a > /dev/null 2>&1; then
#    ifconfig epair11 create up
#    ifconfig proxy-bridge addm epair11a
#fi
#
#jail -c \
#     name=jail_11 \
#     persist \
#     vnet \
#     vnet.interface=epair11b
#
#jexec jail_11 ifconfig epair11b inet 192.168.4.11/24 up
#jexec jail_11 route add default 192.168.4.1 # proxy

# tests
ping="ping -q -c 1"
host=192.168.4.254
proxy=192.168.4.1

echo "host: $host"
echo "proxy: $proxy"

echo -n "ping host to self..."
$ping $host > /dev/null
echo "OK"

echo -n "ping host to proxy..."
$ping $proxy > /dev/null
echo "OK"

echo -n "ping proxy to self..."
jexec proxy $ping $proxy > /dev/null
echo "OK"

echo -n "ping proxy to host..."
jexec proxy $ping $host > /dev/null
echo "OK"

Added src/jail-proxy/pf.conf.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
ext_if = "em0"
proxy_if = "proxy-bridge"
proxy_net = "192.168.4.0/24"

#set skip on lo
#scrub in

#nat log (all) on $ext_if from $proxy_net -> ($ext_if:0)
nat log (all) on $ext_if to 1.1.1.1 -> ($ext_if:0)
#nat on $proxy_if from ! 192.168.4.1 to ! $proxy_if -> ($ext_if:0)

pass out
#block in
pass in

pass log (all) from $proxy_net
pass log (all) to $proxy_net

Added src/mg-agg/Makefile.



















































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
area72.json:
	jq -s add ${.ALLSRC} > ${.TARGET}

clean:
	rm -f *.json

area72.json: d3.json
d3.json:
	fetch -o ${.TARGET} https://www.skyvalleydistrict3aa.org/_functions/api/Items

area72.json: d4.json
d4.json:
	fetch -o ${.TARGET} http://district4aa-wa.org/wp-admin/admin-ajax.php?action=meetings

area72.json: d8.json
d8.json:
	fetch -o ${.TARGET} https://aadistrict8.org/wp-admin/admin-ajax.php?action=meetings

area72.json: d21.json
d21.json:
	fetch -o ${.TARGET} https://aa21.org/wp-admin/admin-ajax.php?action=meetings

area72.json: whatcom.json
whatcom.json:
	fetch -o ${.TARGET} https://whatcomaa.org/wp-admin/admin-ajax.php?action=meetings

Added src/pof/Makefile.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
.PHONY: build test clean

build: _build/pof

test: _build/pof_test _build/pof
	cd test && PATH=${.CURDIR}/_build:$$PATH pof_test

_build/pof: src/pof.sh
	@mkdir -p ${.TARGET:H}
	shtk build -o ${.TARGET} src/pof.sh

_build/pof_test: test/pof_test.sh
	@mkdir -p ${.TARGET:H}
	shtk build -o ${.TARGET} -m shtk_unittest_main test/pof_test.sh

clean:
	rm -rf _build

Added src/pof/README.md.



























































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# pof - pile of files

A simple, content-addressable key-value store.

Goals:

- lightweight
- clear interface
- trustworthy
- fast enough

Future:

- content-defined chunking
- lock local files - don't need to store a second copy of chunks, and can read directly from a file if requested

## Usage

local:

```sh
pof put file://.pof foobar            # => sha256-abc123
pof get file://.pof sha256-abc123 bar # write to bar
pof get file://.pof sha256-abc123 -   # write to stdout

pof put -c md5 file://.pof foobar  # => md5-def456
```

ssh:

```sh
pof put ssh://nas/.pof foobar            # => sha256-abc123
pof get ssh://nas/.pof sha256-abc123 bar # write to bar
pof get ssh://nas/.pof sha256-abc123 -   # write to stdout
```

## File Format

Inspired by Fossil SCM

A POF archive is an unordered set of artifacts.
Artifacts may either be control artifacts, or content artifacts.
pof infers control artifacts from their internal structure, which are defined as cards.

An artifact card begins with the artifact checksum, followed by an ordered list of chunks, which are themselves artifacts.
Each chunk line includes the checksum, number of bytes, and byte offset in the file.
The final line includes a checksum of the artifact card content up to that point, including a new line.

```
A sha256-abc123 1000
F sha256-def456 100 0
F sha256-ghi789 800 100
F sha256-jkl0ab 100 900
Z sha256-foobar 91
```

In the above example, the `Z sha256-foobar 91` shows the checksum of the preceding content, and the number of bytes.

## Protocol

Chunking files allows nodes to exchange only the chunks that the other side needs. 

put:

1. source generates an artifact card
2. source sends artifact card to target
3. target replies with a list of artifacts it needs
4. source sends a card with artifact contents
5. target replies with empty list of artifacts needed

e.g.

source generates a card to send to target:

```
A sha256-abc123 1000
F sha256-def456 100 0
F sha256-ghi789 800 100
F sha256-jkl0ab 100 900
Z sha256-foobar 91
```

target replies with a list of artifacts it needs:

```
A sha256-abc123
F sha256-ghi789
Z sha256-dogcat 32
```

source sends a card with artifact contents:

```
B sha256-ghi789 800 <bytes>
Z sha256-froggy 28
```

target replies with empty list of artifacts needed:

```
A sha256-abc123
Z sha256-spider 16
```

## notes

While not necessarily implemented using HTTP, the protocol is very similar to REST / resource-oriented.
GET and PUT are the main actions.
GET may result in a success, not found, or redirect to a manifest.

Added src/pof/src/pof.sh.









































































































































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
shtk_import cli

set -e

# main [-e] <put|get|daemon> args...: pof entry point.
# -e routes protocol responses to stderr instead of stdout (needed when
# stdout carries the wire protocol, e.g. under socat); the flag is read
# by pof_put/receive_file via dynamic scoping.
main() {
    local log_to_stderr
    local o
    while getopts 'e' o; do
	case "${o}" in
	    e)
		log_to_stderr="1"
		;;
	    *)
		# BUG FIX: the original interpolated "${e}", an undefined
		# variable; OPTARG holds the offending option character.
		shtk_cli_error "Unknown option -${OPTARG}"
		;;
	esac
    done
    # Drop all parsed options at once.  The original shifted inside the
    # getopts loop, which desynchronizes OPTIND from the positionals.
    shift $((OPTIND - 1))
    local cmd="${1}"; shift
    pof_${cmd} "${@}"
}

# pof_put <url> <file>: store <file> in the repository named by <url> and
# print the resulting content address (sha256-<hash>).
# Supported protocols (set by parse_url): file (local copy) and stdio
# (stream to a peer daemon over stdin/stdout).
pof_put() {
    local protocol
    local repo
    parse_url "${1}"; shift
    local file="${1}"; shift
    local sum=sha256-$(sha256sum -q $file)

    case "${protocol}" in
	file)
	    cp $file $repo/$sum
	    echo $sum
	    ;;
	stdio)
	    transmit_file $file $sum
	    # The peer daemon replies with the stored checksum.
	    read response
	    # log_to_stderr is set by main's -e flag (dynamic scoping).
	    if [ "${log_to_stderr}" = "1" ]; then
		echo "$response" 1>&2
	    else
		echo "$response"
	    fi
	    ;;
	*)
	    shtk_cli_error "$protocol is not a valid pof protocol"
	    ;;
    esac
}

# pof_get <url> <sum> <output>: fetch the artifact addressed by <sum> from
# the repository named by <url>.  <output> is a destination path, or "-"
# to stream to stdout.
pof_get() {
    local protocol
    local repo
    parse_url "${1}"; shift
    local sum="${1}"; shift
    local output="${1}"; shift
    local file=$repo/$sum

    case "${protocol}" in
	file)
	    if [ "${output}" = "-" ]; then
		cat $file
	    else
		cp $file $output
	    fi
	    ;;
	stdio)
	    receive_file $output $sum
	    ;;
	*)
	    shtk_cli_error "$protocol is not a valid pof protocol"
	    ;;
    esac
}

# pof_daemon <repo>: serve exactly one protocol request read from stdin.
# Requests:
#   "P <sum> <size>" + <size> bytes + "Z <sum>"  -- store an artifact (put)
#   "G <sum>"                                    -- stream an artifact (get)
pof_daemon() {
    local repo="${1}"; shift
    local infiles=${repo}/.infiles
    mkdir -p ${infiles}
    # Incoming bytes are staged here and renamed into the repo only after
    # the checksum verifies, so a partial upload never corrupts the store.
    local infile=$(mktemp -p ${infiles} -t pof.infile)
    local line
    read line
    local control=$(echo "${line}" | cut -w -f 1)
    case "${control}" in
	P)
	    local begsum=$(echo "${line}" | cut -w -f 2)
	    local size=$(echo "${line}" | cut -w -f 3)
	    dd of=${infile} bs=${size} count=1 2>/dev/null
	    read line
	    local control=$(echo "${line}" | cut -w -f 1)
	    local endsum=$(echo "${line}" | cut -w -f 2)
	    if [ "${control}" != "Z" ]; then
		shtk_cli_error "last line must be 'Z <sum>'"
		exit 1
	    fi
	    if [ "${begsum}" != "${endsum}" ]; then
		shtk_cli_error "start sum ${begsum} does not match end sum ${endsum}"
		exit 1
	    fi

	    # Verify the payload actually hashes to the advertised sum.
	    local sum=sha256-$(sha256sum -q ${infile})
	    if [ "${sum}" != "${endsum}" ]; then
		shtk_cli_error "received sum ${endsum} does not match calculated sum ${sum}"
		exit 1
	    fi
	    mv ${infile} ${repo}/${sum}
	    echo "${sum}"
	    ;;
	G)
	    local requested_sum=$(echo "${line}" | cut -w -f 2)
	    local file=${repo}/${requested_sum}
	    if [ ! -f "${file}" ]; then
		shtk_cli_error "Cannot find file ${file}"
		exit 1
	    fi
	    # Re-hash before serving to catch on-disk corruption.
	    local sum=sha256-$(sha256sum -q ${file})
	    if [ "${requested_sum}" != "${sum}" ]; then
		# BUG FIX: the original message referenced the undefined
		# variable ${requested} instead of ${requested_sum}.
		shtk_cli_error "requested sum ${requested_sum} does not match calculated sum ${sum}"
		exit 1
	    fi
	    local size=$(stat -f %z $file)
	    echo "A ${sum} ${size}"
	    cat ${file}
	    echo "Z ${sum}"
	    ;;
	*)
	    shtk_cli_error "unknown control character ${control}"
	    exit 1
	    ;;
    esac
}

# transmit_file <file> <sum>: emit a "P" (put) request on stdout -- header
# line, raw file bytes, then the trailing "Z <sum>" checksum line.
# NOTE(review): "stat -f %z" is the BSD flag form; GNU stat uses -c %s.
transmit_file() {
    local file="${1}"; shift
    local sum="${1}"; shift
    local size=$(stat -f %z $file)
    echo "P ${sum} ${size}"
    cat ${file}
    echo "Z ${sum}"
}

# receive_file <outfile> <sum>: send a "G" (get) request on stdout and
# write the peer's reply body to <outfile>.  "-" streams the body straight
# to stdout (to stderr instead when log_to_stderr=1, set by main's -e).
receive_file() {
    local outfile="${1}"; shift
    local infile="${outfile}.infile"
    local sum="${1}"; shift
    echo "G ${sum}"

    local line
    read line
    local control=$(echo "${line}" | cut -w -f 1)
    local begsum=$(echo "${line}" | cut -w -f 2)
    local size=$(echo "${line}" | cut -w -f 3)
    if [ "${control}" != "A" ]; then
	shtk_cli_error "first line must be 'A <sum> <size>'"
    fi
    if [ "${outfile}" = "-" ]; then
	if [ "${log_to_stderr}" = "1" ]; then
	    dd bs=${size} count=1 2>/dev/null | tee 1>&2
	else
	    dd bs=${size} count=1 2>/dev/null
	fi
    else
	dd of=${infile} bs=${size} count=1 2>/dev/null
    fi
    read line
    # When streaming to stdout the body is already gone; the trailing
    # checksum cannot be verified after the fact, so stop here.
    if [ "${outfile}" = "-" ]; then
 	exit 0
    fi
    local control=$(echo "${line}" | cut -w -f 1)
    local endsum=$(echo "${line}" | cut -w -f 2)
    if [ "${control}" != "Z" ]; then
	# BUG FIX: the closing quote was missing from this message.
	shtk_cli_error "last line must be 'Z <sum>'"
	exit 1
    fi
    if [ "${begsum}" != "${endsum}" ]; then
	shtk_cli_error "start sum ${begsum} does not match end sum ${endsum}"
	exit 1
    fi

    # Verify the received bytes hash to the advertised sum before
    # committing them to the requested destination.
    local sum=sha256-$(sha256sum -q ${infile})
    if [ "${sum}" != "${endsum}" ]; then
	shtk_cli_error "received sum ${endsum} does not match calculated sum ${sum}"
	exit 1
    fi
    mv ${infile} ${outfile}
}

# parse_url <url>: split "<protocol>://<repo>" into the caller-visible
# globals $protocol and $repo.  The special URL "-" selects the stdio
# protocol used when pof speaks over a socat/ssh pipe.
parse_url() {
    local url="${1}"; shift
    case "${url}" in
	-)
	    protocol=stdio
	    repo="-"
	    ;;
	*)
	    protocol=$(printf '%s\n' "${url}" | grep -o '^[[:alnum:]]*:\/\/' | sed 's|://$||')
	    repo=$(printf '%s\n' "${url}" | grep -o ':\/\/.*$' | sed 's|^://||')
	    ;;
    esac
}

Added src/pof/test/pof_test.sh.



































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
shtk_import unittest

shtk_unittest_add_test put_get
put_get_test() {
    pofdir=$(mktemp -d -t pof.test)
    file1=$pofdir/file1
    file2=$pofdir/file2
    sum=$pofdir/sum

    echo "hello pof" > $file1
    assert_command -o not-empty -o save:$sum pof put file://$pofdir $file1

    assert_command pof get file://$pofdir $(cat $sum) $file2
    assert_file file:$file1 $file2

    assert_command -o inline:"hello pof\n" pof get file://$pofdir $(cat $sum) -
}

shtk_unittest_add_test stdio_client_server
stdio_client_server_test() {
    pofdir=$(mktemp -d -t pof.test)
    file1=$pofdir/file1
    file2=$pofdir/file2
    sum=$pofdir/sum

    echo "hello pof" > $file1
    assert_command -e save:$sum socat exec:"pof -e put - $file1" exec:"pof daemon $pofdir"

    assert_command socat exec:"pof get - $(cat $sum) $file2" exec:"pof daemon $pofdir"
    assert_file file:$file1 $file2

    assert_command -e inline:"hello pof\n" socat exec:"pof -e get - $(cat $sum) -" exec:"pof daemon $pofdir"
}

Added topics.md.









































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
# writing topics

## www/tmp dir

a place to add stuff that I want to publish, that I may / probably will eventually delete

## simulating a sliced monorepo

use worktrees for the content
symlink the dirs you care about
"monocle" - a tool to slice the monorepo

## elixir service with root privileges

- read PIDs, kill one from user input (reading PIDs uses NIF?)
- run a script

probably the elixir service runs non-privileged, and a smaller privileged service does the dirty work

## what I would do differently

- hexcode interview asked me about this
- at the time I just said "I would do more"
- I followed up with "share with people"
- thought about it some more, comes down to two things:
  1. keep my library
  2. commit to open source OS and editor (asset vs utility)

## publishing workflow

- drafts branch
- when it's ready, _copy_ the file into published trunk location, delete draft

## Public and private repos using Fossil child repos

- Fossil's child repos are a really cool feature.
- I can have my home dir be a child repo of the public repo.
- Commit personal stuff to the child repo.
- That way I can have a public version and a private version, without having to keep two separate repos.

## As Pat Sees It

- Team collaboration often tends toward deciding "how are we going to do X?"
- People disagree, nobody makes a decision, nothing happens.
- Instead, we each bring our own views to the table.
- e.g. what is the project overview?
- It would be great if we had a shared source of truth - but that often doesn't happen.
- Instead, I can make a "As Pat sees it" page, and invite other people to comment / participate.

## Misc

- How I make this site

Added wip/Justfile.













































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
# Root directory holding one nested fossil checkout per project branch.
branches_dir := "~/.project-branches"

# Default recipe: list all available recipes.
help:
  @just -l

alias b := branches
# List every branch except trunk, stripping the "*" current-branch marker
# and any leading whitespace from fossil's output.
branches:
  fossil branch | grep -v trunk | sed -e 's/*//' -e 's/^[[:space:]]*//'

alias o := open
# Open a checkout for branch `p` under {{branches_dir}} (if not already open)
# and symlink its project subdirectory into the current tree.
open p: _mk-dirs
  #!/bin/sh
  set -e
  # Only open the branch if its checkout directory doesn't exist yet.
  if [ ! -d {{branches_dir}}/{{p}} ]; then
    fossil open ~/data/fossils/patmaddox.com.fossil {{p}} --workdir {{branches_dir}}/{{p}} --nested
  fi
  # Symlink the branch's project dir (checkout/<p>/<p>) into place, but only
  # when the link target exists and nothing occupies the local path already.
  if [ ! -d {{p}} ] && [ -d {{branches_dir}}/{{p}}/{{p}} ]; then
    mkdir -p $(dirname {{p}})
    ln -s $(realpath {{branches_dir}}/{{p}}/{{p}}) {{p}}
  fi

# Open and symlink every non-trunk branch reported by `just branches`.
open-all:
  #!/bin/sh
  set -e
  for b in $(just branches); do just open $b; done

# Create a www/<name> branch, open a nested checkout for it, seed an
# index.md stub, symlink the project dir into the current tree, and add
# the stub to fossil. Mirrors new-pastebin / new-draft.
new-article name: _mk-dirs
  #!/bin/sh
  set -e
  proj_name=www/{{name}}
  fossil branch new ${proj_name} trunk
  fossil open ~/data/fossils/patmaddox.com.fossil ${proj_name} --workdir {{branches_dir}}/${proj_name} --nested
  mkdir -p {{branches_dir}}/${proj_name}/${proj_name}
  echo '# ...' > {{branches_dir}}/${proj_name}/${proj_name}/index.md
  mkdir -p $(dirname ${proj_name})
  ln -s $(realpath {{branches_dir}}/${proj_name}/${proj_name}) ${proj_name}
  # Fix: was `cd ${proj_name}}` — the stray brace broke the cd, so the
  # `fossil add` ran against the wrong path (cf. new-pastebin / new-draft).
  cd ${proj_name} && fossil add index.md

# Helper: ensure the branch-checkout root exists before opening anything.
_mk-dirs:
  mkdir -p {{branches_dir}}

# Create a pastebin/<timestamp>-<name> branch with a nested checkout,
# seed an index.md stub, symlink it locally, and add the stub to fossil.
new-pastebin name:
  #!/bin/sh
  set -e
  # Second-resolution timestamp keeps pastebin entries unique and sortable.
  prefix=$(date "+%Y%m%d%H%M%S")
  pbdir=pastebin/${prefix}-{{name}}
  fossil branch new ${pbdir} trunk
  fossil open ~/data/fossils/patmaddox.com.fossil ${pbdir} --workdir {{branches_dir}}/${pbdir} --nested
  mkdir -p {{branches_dir}}/${pbdir}/${pbdir}
  echo '# ...' > {{branches_dir}}/${pbdir}/${pbdir}/index.md
  mkdir -p $(dirname ${pbdir})
  ln -s $(realpath {{branches_dir}}/${pbdir}/${pbdir}) ${pbdir}
  cd ${pbdir} && fossil add index.md

# Create a drafts/<date>-<name> branch with a nested checkout,
# seed an index.md stub, symlink it locally, and add the stub to fossil.
new-draft name:
  #!/bin/sh
  set -e
  # Day-resolution prefix — drafts are expected to be rarer than pastebins.
  prefix=$(date "+%Y%m%d")
  draftsdir=drafts/${prefix}-{{name}}
  fossil branch new ${draftsdir} trunk
  fossil open ~/data/fossils/patmaddox.com.fossil ${draftsdir} --workdir {{branches_dir}}/${draftsdir} --nested
  mkdir -p {{branches_dir}}/${draftsdir}/${draftsdir}
  echo '# ...' > {{branches_dir}}/${draftsdir}/${draftsdir}/index.md
  mkdir -p $(dirname ${draftsdir})
  ln -s $(realpath {{branches_dir}}/${draftsdir}/${draftsdir}) ${draftsdir}
  cd ${draftsdir} && fossil add index.md

# List opened project symlinks, relative to the current dir, sorted.
# NOTE(review): `-depth 2` as a primary is BSD find syntax — confirm this
# is not meant to run under GNU find, where it would need `-mindepth/-maxdepth`.
ls:
  find . -type l -depth 2 | sed -e 's|./||' | sort

Added www/experiments.md.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
# Experiments

- `fossil-stage` - for staging commits, following the process I outlined in [this forum post](https://fossil-scm.org/forum/forumpost/471ce9312d)
- `fossil-publish`
  Develop content in one branch, squash it to the main branch
  I want to have a `drafts` branch where I do my writing.
  When I'm ready to publish it, I will copy the final file into trunk.
  Then I'll commit in trunk, and merge it to drafts.
  Hopefully it doesn't blow up :)
  - This seems to work pretty well.
    This is the same scenario I documented in [this forum post](https://fossil-scm.org/forum/forumpost/30716c59bafcf6e2).
    I wonder how the [proposed fix](https://fossil-scm.org/forum/forumpost/6629813f6f) affects this?
    The content is the same, so I would not expect to get a conflict.

Added www/fossil_wishlist.md.











































































































































































































































































































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
# Fossil Wishlist

There are a few things I wish Fossil did, that I will perhaps try to implement.

They are listed in the order that I intend to work on them, based on a combination of interest and difficulty.

- **Integrate GFM** - fork [cmark-gfm](https://github.com/github/cmark-gfm) and incorporate Fossil-specific markdown rendering
- **Single simple browser** - separate `/doc` `/file` `/dir` seems unnecessary
- **Auto-redirect on 404** - so I can move files without breaking links
- **Merge request / diff review** - to review changes in a branch before merging

## Markdown checklists

- Interest: High
- Difficulty: Low
- Status: Not started

Example:

```
- [ ] unchecked
- [x] checked
```

Current behavior:

- [ ] unchecked
- [x] checked

Desired behavior:

<input type="checkbox" name="cb_a"> <label for="cb_a">unchecked</label><br/>
<input type="checkbox" name="cb_b" checked> <label for="cb_b">checked</label>

## Auto-redirect docs on 404

- Interest: High
- Difficulty: Medium
- Status: Not started

I want to use Fossil's docs feature to publish my writing.
One problem I've run into is if I re-organize or re-name anything, links are broken.
Fossil will have a record of any moves that happen, so requesting a doc should 301 to the current location.

Seems like I can't rely on event mtime for ordering.
Try searching for the same manifest id instead.

```sql
SELECT filename.fnid,
       filename.name,
       tag.tagname
FROM   mlink
       JOIN tagxref
         ON tagxref.rid = mlink.mid
       JOIN tag
         ON tagxref.tagid = tag.tagid
       JOIN filename
         ON filename.fnid = mlink.fnid
WHERE  tagname = 'sym-trunk'
       AND filename.name = 'foo.md'
	   LIMIT 1;
```

Draft post to Fossil forum:

```
Hi there, I have a contribution to submit for consideration and feedback.

The basic purpose of this contribution is that I should be able to publish a link to `/doc/trunk/foo.md`, later rename it to `bar.md`, and have visitors be redirected to `bar.md` rather than get a 404.

- **Problem**: moving files breaks `/doc` links, which may be published / shared outside of the Fossil repo
- **Desired outcome**: Fossil redirects to the new file name, because it has a history of the move
- **Contribution**: [doc-mv-301-redirect-branch](https://chiselapp.com/user/patmaddox/repository/fossil-scm/timeline?r=doc-mv-redirect-301)

How it works: when a user requests a `/doc` link that would result in a 404, Fossil checks to see if it was previously a valid file name that has since been moved. If so, it responds with a 301 redirect

## Concern: Potential memory leak

I'm concerned it introduces a memory leak. Do I need to call `fossil_free` on `zTagName` and/or `zNewFileName`?

## Implementation note: `tip`

I opted not to make this work with `/doc/tip/filename`. Because `tip` could refer to any branch at any time, I'm not sure it makes sense to apply this behavior. If there are no leaves with `foo.md` in them, I think `/doc/tip/foo.md` should reflect that and 404.

## Implementation note: branch history

1. add foo, commit to trunk
2. rename to bar, commit to branch1
3. rename to baz, commit to branch2

A request for /doc/branch2/foo.md will 404 because branch2 has no commits that reference foo. I think this is okay, as the point is to have links redirected on a particular branch, not necessarily on any branch.

## Potential for improvement: Recursive resolution

Right now, this will 301 with the next known path.
If the file has been moved a few times, the browser will make multiple requests, receiving a 301 each time until it reaches the end.
Fossil could resolve the final destination as part of the request, so only a single 301 is necessary.

## Question: What to do when a file has been removed?

1. add foo, commit
2. rename foo to bar, commit
3. add foo again, commit
4. remove foo, commit

Requesting /foo will direct to /bar, even though /foo has explicitly been removed.

This should potentially filter based on mlink mtime to not redirect to a path that occurred before a delete, or reference the manifest to determine if there was a move.
```

## Syntax highlighting

Basically I want GFM + whatever Fossil-specific features it needs to support links.

## Merge requests / diff review

Some kind of diff review would be helpful.
I've used GitHub / GitLab style a bunch, but also liked what FreeBSD does with differential.

## Doc links to dir / files using `$CURRENT`

- Interest: Medium
- Difficulty: Low
- Status: Not started

Docs are part of a current checkout.
They should be able to produce links with `$CURRENT` in the target.
Even better would be to have a special link command.
`[foo](dir:foo)` to link to relative, `[foo](dir:/foo)` to link to absolute.

Potential snag: files on disk may not have been added to the repo.
How will the file explorer handle this?
e.g. if I have a link `[foo](file:foo.md)` where foo is uncommitted.

## [Summary Commits](https://fossil-scm.org/forum/forumpost/20853693af7a605c)

- Interest: High
- Difficulty: High
- Status: Not started

Tag a range of commits so they appear as a single commit on the timeline.
Expand the summaries to see the details.

## Annexed files

- Interest: Very high
- Difficulty: Very high
- Status: Not started

[git-annex](https://git-annex.branchable.com/) for Fossil.

One big question: how many files can Fossil handle?
Git is slow when you get to ~200k files or more.

## (Maybe) Show / link to history in docs

Docs are great for presenting information, but they can change.
It's probably a simple matter of updating a template to link to the doc history.

## (Maybe) Render dir markdown as docs

Or more likely support the ?ci=ckout special checkin.

I want to link to the dir page so I can see the file structure.
Links in markdown files (especially README) should still work.

Added www/inbox.md.











>
>
>
>
>
1
2
3
4
5
# Inbox

- [How "Exit Traps" Can Make Your Bash Scripts Way More Robust And Reliable](http://redsymbol.net/articles/bash-exit-traps/)
  - The article says bash, I assume it's the same for sh?
- [Courgette](https://www.chromium.org/developers/design-documents/software-updates-courgette/) binary diff tool from Chromium

Added www/notes/add-freebsd-package-categories.md.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
# Add FreeBSD package categories

In make.conf:

`VALID_CATEGORIES+= my_category`

- `/usr/local/etc/poudriere.d/make.conf` (or more specific conf files)

Added www/notes/disable-ssh-agent-xfce4.md.











>
>
>
>
>
1
2
3
4
5
# Disable ssh-agent in xfce4

`xfconf-query -c xfce4-session -p /startup/ssh-agent/enabled -n -t bool -s false`

from https://notebook.niklaas.eu/xfce-disable-ssh-agent/

Added www/notes/freebsd-swap-caps-lock-and-control.md.



























>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
# Swap caps lock and control

Per-session: `setxkbmap -option ctrl:nocaps`

Permanent:

```
Section "InputClass"
	Identifier "Map caps lock to ctrl"
        MatchIsKeyboard "on"
        Option "XkbOptions" "ctrl:nocaps"
EndSection
```

Added www/notes/freebsd-zfs-on-root-gcp-aws-poudriere.md.





























>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
# FreeBSD ZFS-on-root on GCP and AWS with Poudriere

## GCP

- Make a disk.raw file, then `tar --format=gnutar -zcf myimage.tar.gz disk.raw` (from src/release GCE tool)
- Upload it to GCP storage
- `gcloud compute images create myimage --source-uri=gs://mybucket/myimage.tar.gz --guest-os-features=UEFI_COMPATIBLE`

## AWS

- Upload image to S3
- Create a snapshot: `aws ec2 import-snapshot --disk-container Format=raw,Url=s3://mybucket/myimage.img`
- Make an AMI (use UEFI boot)
  I used console, need to figure out command line

Added www/notes/xfce4-suspend-resume-menu-lock-display.md.



































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
# xfce4 suspend / resume menu - lock display on suspend

xfce4 settings -> session and startup -> enable "lock screen before sleep"

need to use the xfce4 menu suspend rather than `doas zzz`

https://forums.freebsd.org/threads/xfce-logout-menu-lack-of-suspend-hibernate-menu.74771/

## Activate stop / suspend options

```
polkit.addRule(function(action, subject) {
  if((action.id == "org.freedesktop.consolekit.system.restart" || action.id == "org.freedesktop.consolekit.system.stop" || action.id == "org.freedesktop.consolekit.system.suspend") && subject.isInGroup("operator")) {
    return polkit.Result.YES;
  }
});
```

Added www/repo-wishlist.md.









































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
# Repo Wishlist

- [ ] Render org-files to markdown
  - This isn't hard.
	But I want to do it in a way that works for each of my repos.
	I've thought about using a CGI script to render on the fly.
	The simplest thing is to export org to markdown and commit.
- [ ] Verify links
  - I use a bunch of markdown links.
	Are they valid?
  - Where should this be handled?
	Is it a static file checker, or should it be handled by the server?
  - I think a static checker makes more sense, because it's compatible with any Markdown-based generator.
- [ ] Automatic documentation back links
- [ ] Table of contents & heading links
- [ ] Script to make a new checkout
  - This is trivial if I hardcode stuff.
  - Maybe I follow a simple convention.
  - If there's a _checkouts dir, check it out there.
  - Otherwise, check it out parallel to the current checkout dir.

Added www/scrapheap.md.

















>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
# The Scrap Heap

These are experiments I worked on that I've since abandoned.

- [**zolatest:**](/dir?name=scraps/zolatest) an OSS site generator
  - It's pretty nice, but rather complex, and I have Fossil.
- [**mksite:**](/dir?name=scraps/mksite) a static site generator using `make`
  - I liked this quite a bit, but I don't need it anymore now that I'm using Fossil.

Added www/tmp/fossil-merge-history/clean-history.mk.











































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
# Experiment: do throwaway commits on a branch stay off the trunk timeline
# after a merge? Builds a scratch fossil repo and prints both timelines.
WRKDIR=	/tmp/fossil-clean-history
CD=	cd ${WRKDIR}

# Create the repo, commit once on trunk, twice on branch b1, merge b1 back,
# then show the complete timeline vs. the trunk-only timeline for comparison.
${WRKDIR}/repo.fossil:
	mkdir -p ${WRKDIR}
	fossil init ${.TARGET}
	${CD} && fossil open repo.fossil
	echo "c1 trunk" >> ${WRKDIR}/foo
	${CD} && fossil addrem && fossil commit -m "c1 trunk"
	echo "c1 b1" >> ${WRKDIR}/bar
	${CD} && fossil addrem && fossil commit -m "c1 b1" --branch b1
	echo "c2 b1" >> ${WRKDIR}/bar
	${CD} && fossil addrem && fossil commit -m "c2 b1"
	${CD} && fossil up trunk && fossil merge b1 && fossil commit -m "merge b1"
	@echo "=== fossil timeline (complete)"
	${CD} && fossil timeline
	@echo "=== fossil timeline (trunk)"
	${CD} && fossil timeline -b trunk

# Remove the scratch repo and checkout so the experiment can be re-run.
clean:
	rm -rf ${WRKDIR}

Added www/tmp/fossil-merge-history/index.md.















>
>
>
>
>
>
>
1
2
3
4
5
6
7
can I make a bunch of junk commits
and still have a clean history?

can I re-use a branch
without having to keep merging trunk in?

let's see

Added www/tmp/fossil-merge-history/reuse-branch.mk.









































































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# Experiment: can a branch be reused across two merge cycles (using
# `merge --integrate`) without repeatedly merging trunk back into it?
WRKDIR=	/tmp/fossil-reuse-branch
CD=	cd ${WRKDIR}

# Two rounds of: commit on branch b, integrate-merge into trunk, commit on
# trunk — then print the complete, trunk-only, and b-only timelines.
${WRKDIR}/repo.fossil:
	mkdir -p ${WRKDIR}
	fossil init ${.TARGET}
	${CD} && fossil open repo.fossil

	echo "c1 trunk" >> ${WRKDIR}/foo
	${CD} && fossil addrem && fossil commit -m "c1 trunk"

	echo "c1.1 b1" >> ${WRKDIR}/bar
	${CD} && fossil addrem && fossil commit -m "c1.1 b1" --branch b
	echo "c1.2 b1" >> ${WRKDIR}/bar
	${CD} && fossil addrem && fossil commit -m "c1.2 b1"

	${CD} && fossil up trunk && fossil merge --integrate b && fossil commit -m "merge b1"
	echo "c2 trunk" >> ${WRKDIR}/foo
	${CD} && fossil commit -m "c2 trunk"

	echo "c2.1 b1" >> ${WRKDIR}/bar
	${CD} && fossil addrem && fossil commit -m "c2.1 b1" --branch b
	echo "c2.2 b1" >> ${WRKDIR}/bar
	${CD} && fossil addrem && fossil commit -m "c2.2 b1"

	${CD} && fossil up trunk && fossil merge --integrate b && fossil commit -m "merge b1"

	@echo "=== fossil timeline (complete)"
	${CD} && fossil timeline
	@echo "=== fossil timeline (trunk)"
	${CD} && fossil timeline -b trunk
	@echo "=== fossil timeline (b)"
	${CD} && fossil timeline -b b

# Remove the scratch repo and checkout so the experiment can be re-run.
clean:
	rm -rf ${WRKDIR}

Added www/todo.md.





































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
# TODO

- [ ] learn to write man pages
- [ ] generate a list of all docs (maybe with search?)
- [ ] figure out how to render the title on docs
- [ ] _Add a project prefix git commit hook._
  With a monorepo, most commit messages end up looking something like "component: the commit message".
  I can type them in, but I might forget.
  I'd like to define a file (e.g. `.project`) that automatically provides a prefix for the commit message.
- [ ] _Set emacs starting state_. Full screen, with the font size I like (might be different per machine)
- [ ] Serve via rc
  - Currently I just have a tmux session running on the server.
    It would be good to incorporate it into the rc system.
    [Example OpenBSD rc script](https://fossil-scm.org/home/doc/trunk/www/server/openbsd/service.wiki) in fossil docs.
- [ ] start incorporating my mess of stuff into this repo...
- [ ] Update elixir-mode.el to depend on pkg-info.el and epl.el (or elixir-mode.el -> pkg-info.el -> epl.el)
- [ ] Experiment with netgraph
- [ ] Build a system to manage ports trees

Added www/wishlist-vs-todo.md.

































>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
>
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
# Wishlist vs TODOs

I've recorded some [TODO](todo.md) items...
for whatever reason, I find TODO a bit boring and uninspiring.
Plus, I don't know when or if I will get to some of those things.

Instead (or in addition), I'm going to record wish lists.
I will pick some items to actually work on and add to my TODO list.

I suppose David Allen would call these "someday / maybe projects."
I've always found those to be a bog of ideas that I'll never get to.

Wish list has more power to me - it indiciates a desired future state in a particular area.

- [Fossil Wishlist](fossil_wishlist.md)
- [Repo Wishlist](repo-wishlist.md)