Update add_table_names script
Add a new script, tools/add_table_names, that annotates the output of ovs-ofctl dump-flows with table names, including the table IDs allocated by the new datapath. It relies on the new datapath allocation output file. Change-Id: I82069d1d084145ea0a55fd8c7ea2573ba5a86c4f
This commit is contained in:
parent
6f94d70ec3
commit
7b8fd6dd4d
|
@ -539,11 +539,6 @@ function setup_rootwrap_filters {
|
|||
fi
|
||||
}
|
||||
|
||||
# Generate the add_table_names helper script into $DRAGONFLOW_DIR/tools.
# Globals:   DRAGONFLOW_DIR (read) - root of the Dragonflow checkout
# Outputs:   progress message to stdout; writes tools/add_table_names
function create_tables_script {
    echo "Creating add_table_names script"
    # Quote expansions so paths containing spaces do not word-split.
    "$DRAGONFLOW_DIR/tools/create_add_tables_script.sh" "$DRAGONFLOW_DIR" "$DRAGONFLOW_DIR/tools/add_table_names"
}
|
||||
|
||||
function stop_df_bgp_service {
|
||||
if is_service_enabled df-bgp ; then
|
||||
echo "Stopping Dragonflow BGP dynamic routing service"
|
||||
|
@ -607,8 +602,8 @@ function handle_df_stack_post_install {
|
|||
start_df_metadata_agent
|
||||
start_df_bgp_service
|
||||
setup_rootwrap_filters
|
||||
create_tables_script
|
||||
start_df_skydive
|
||||
install_package jq
|
||||
}
|
||||
|
||||
function handle_df_stack {
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
#!/bin/awk -f
#
# Annotate `ovs-ofctl dump-flows` output (read on stdin) with Dragonflow
# table names: every `table=N` or `resubmit(,N` occurrence is suffixed
# with `(NAME)` when N has a known name.
#
# Table names come from two sources, merged into id_to_name:
#   1. TABLE constants parsed out of the controller constants.py file;
#   2. the datapath allocation JSON, inverted via an embedded jq program.
# Both source paths can be overridden through the environment
# (CONTROLLER_CONSTANTS_FILE, DATAPATH_ALLOCATION_FILE).
#
# NOTE(review): uses gawk extensions (3-argument match(), ENVIRON);
# assumes /bin/awk is gawk.

BEGIN {
    CONTROLLER_CONSTANTS_FILE="CONTROLLER_CONSTANTS_FILE" in ENVIRON ? ENVIRON["CONTROLLER_CONSTANTS_FILE"] : "/opt/stack/dragonflow/dragonflow/controller/common/constants.py";
    DATAPATH_ALLOCATION_FILE="DATAPATH_ALLOCATION_FILE" in ENVIRON ? ENVIRON["DATAPATH_ALLOCATION_FILE"] : "/var/run/dragonflow/dragonflow_datapath_allocation.json";

    # id_to_name from python file: every non-comment assignment whose
    # name contains TABLE is emitted by a child awk as "id name" pairs.
    cmd = "awk '/^[^#].*TABLE[\\w]*/ {split($0, a, \" *= *\"); name=a[1]; id=a[2]; print id \" \" name}' " CONTROLLER_CONSTANTS_FILE
    while (cmd | getline) {
        split($0, a);
        id_to_name[ a[1] ] = a[2];
    }
    close(cmd)

    # id_to_name from jq: invert each datapath's states/entrypoints/
    # exitpoints maps ({name: id} -> {id: name}), prefixing names with
    # the datapath variable ("<var>.", "<var>.in.", "<var>.out."), and
    # print "id name" lines. Duplicate ids are joined with "/".
    jq_code = "\
. as $all | \n\
def invert: . as $in |\n\
$in | keys as $ks |\n\
reduce $ks[] as $k ({}; .[$in[$k] | tostring] = $k);\n\
def invert_and_merge: reduce . as $obj ({}; . |= $obj | invert);\n\
def map_keys(f): . as $obj | $obj | keys as $ks | reduce $ks[] as $k ({}; .[$k | f] = $obj[$k]);\n\
def on_exists($v2): . + \"/\" + $v2; \n\
def join($new): . as $orig | $new | keys as $n_ks |\n\
reduce $n_ks[] as $k ($orig;\n\
.[$k] = (if ($orig | has($k)) then ($orig[$k] | on_exists($new[$k])) else $new[$k] end));\n\
def filter_keys(f): . as $obj | $obj | keys as $ks | reduce $ks[] as $k ({}; if ($k | f) then .[$k] = $obj[$k] else . end);\n\
def to_output: . as $in | $in | keys as $ks | reduce $ks[] as $k (\"\"; . + $k + \" \" + $in[$k] + \"\\n\") | rtrimstr(\"\\n\");\n\
$all | filter_keys(. != \"dragonflow-legacy\") as $all |\n\
$all | keys as $k |\n\
reduce $k[] as $var ({}; . += (\n\
($all[$var][\"states\"] | map_keys($var + \".\" + .) | invert)\n\
| join(($all[$var][\"entrypoints\"] | map_keys($var + \".in.\" + .) | invert))\n\
| join(($all[$var][\"exitpoints\"] | map_keys($var + \".out.\" + .) | invert))\n\
)) |\n\
to_output\n\
"
    # Feed the jq program via a quoted heredoc so nothing in it is
    # expanded by the shell; the JSON file is the jq input.
    cmd = "jq -r -f /dev/stdin " DATAPATH_ALLOCATION_FILE " << \"EOF\"\n" jq_code "\nEOF"
    while (cmd | getline) {
        split($0, a);
        id_to_name[ a[1] ] = a[2];
    }
    close(cmd)
}

# Main rule: copy each input line, appending "(NAME)" after every
# table reference whose id was resolved above.
{
    head = ""
    tail = $0
    while (match(tail, /(resubmit\(,|table=)([0-9]+)/, arr)) {
        repl = substr(tail, RSTART, RLENGTH)
        # Only annotate known ids; previously an unknown id produced a
        # misleading empty "()" suffix.
        if (arr[2] in id_to_name)
            repl = repl "(" id_to_name[arr[2]] ")"
        head = head substr(tail, 1, RSTART-1) repl
        tail = substr(tail, RSTART+RLENGTH)
    }
    print head tail
}
|
|
@ -1,18 +0,0 @@
|
|||
#!/bin/bash
#
# Generate the tools/add_table_names helper: an awk script that annotates
# `ovs-ofctl dump-flows` output with table names taken from the TABLE
# constants in the controller's constants.py.
#
# Usage: create_add_tables_script.sh [dragonflow_dir] [dest_file]

# If no root path was supplied, we assume we are at the root of DragonFlow
# project
DRAGONFLOW_DIR=${1:-.}

SRC_FILE=${DRAGONFLOW_DIR}/dragonflow/controller/common/constants.py
DEST_FILE=${2:-${DRAGONFLOW_DIR}/tools/add_table_names}

# The following one-liner awk script does the magic.
# First - adds the script prefix
# Then - it parses the SRC_FILE, for every constant that contains the word
# TABLE, it creates an entry in the awk file dictionary from the table ID to
# its name
# Lastly - after all lines are done, it adds the hard-coded actual body of
# the script
# Paths are quoted so directories with spaces do not word-split or glob.
awk 'BEGIN {FS="="; print "#!/bin/awk -f\n\nBEGIN {"}; /^[^#].*TABLE[\w]*/{gsub(" ", ""); name=$1; id=$2; line=" id_to_name["id"]=\""name"\""; print line }; END {print "}\n\n{\n head = \"\"\n tail=$0\n while (match(tail, /(resubmit\\(,|table=)([0-9]+)/, arr)) {\n repl = substr(tail, RSTART, RLENGTH)\n head = head substr(tail,1, RSTART-1) repl \"(\" id_to_name[arr[2]] \")\"\n tail = substr(tail, RSTART+RLENGTH)\n }\n print head tail\n}\n"}' "${SRC_FILE}" > "${DEST_FILE}"
chmod +x "${DEST_FILE}"
|
Loading…
Reference in New Issue