from random import choice
from copy import deepcopy
import networkx as nx
import matplotlib.pyplot as plt
import sys
# min_cut / stoer_wagner below recurse once per graph contraction, so large
# inputs need more headroom than CPython's default recursion limit (1000).
sys.setrecursionlimit(5000)
def init_graph(filename='case3.txt'):
    """Load an undirected, unit-weight graph from an edge-list file.

    Args:
        filename: path to a text file with one whitespace-separated
            "u v" node pair per line (default 'case3.txt', the value
            that was previously hard-coded).

    Returns:
        networkx.Graph with every listed edge carrying weight=1.
    """
    G = nx.Graph()
    with open(filename, 'r') as graphInput:
        edges = [[int(x) for x in line.split()] for line in graphInput]
    G.add_edges_from(edges, weight=1)
    return G
def merge(G, s, t):
    """Contract node t into node s, in place.

    Every edge weight that t carried is transferred onto s: weights on
    neighbours shared with s are accumulated, neighbours unique to t are
    re-attached to s.

    Args:
        G: mutable weighted graph (modified in place).
        s: the surviving node.
        t: the node to remove.

    Returns:
        The same graph object G, after contraction.
    """
    adjacent = dict(G[t])  # snapshot t's neighbourhood before deleting it
    G.remove_node(t)
    for nbr, attrs in adjacent.items():
        if nbr == s:
            continue  # the s-t edge itself disappears with the contraction
        if G.has_edge(s, nbr):
            # shared neighbour: fold t's capacity into the existing edge
            G[s][nbr]['weight'] += attrs['weight']
        else:
            # neighbour only t had: reconnect it to s with the same weight
            G.add_edge(s, nbr, weight=attrs['weight'])
    return G
def min_cut(G, s, clo):
    """Run one contraction phase and report the cut it ends on.

    Repeatedly absorbs the heaviest neighbour of s into s until only two
    nodes remain, then returns the cut separating those two nodes.

    Args:
        G: mutable weighted graph (consumed by contractions).
        s: start node kept alive through every merge.
        clo: scratch value; overwritten with the most recently merged node.

    Returns:
        (t, clo, w): the single node still adjacent to s, the last node
        merged into s, and the weight of the s-t cut.
    """
    while len(G) > 2:
        # pick s's neighbour with the heaviest edge; on ties max() keeps
        # the first one encountered, matching the original behaviour
        clo = max(G[s].items(), key=lambda item: item[1]['weight'])[0]
        merge(G, s, clo)
    remaining = dict(G[s])
    t = next(iter(remaining))
    return t, clo, remaining[t]['weight']
def stoer_wagner(G, global_cut, u_v, s):
    """Shrink G phase by phase, tracking the lightest cut observed.

    Args:
        G: weighted graph; destroyed by successive contractions.
        global_cut: best (smallest) cut weight found so far.
        u_v: node pair realising global_cut.
        s: fixed start node used for every phase.

    Returns:
        (global_cut, u_v): the minimum cut weight and a node pair that
        the cut separates.
    """
    while len(G) > 2:
        # run one phase on a copy so its merges don't disturb G itself
        u, v, w = min_cut(deepcopy(G), s, 0)
        merge(G, u, v)
        if w < global_cut:
            global_cut, u_v = w, (u, v)
    # two nodes left: the single remaining edge is the final candidate cut
    final_weight = next(iter(dict(G[s]).values()))['weight']
    if final_weight < global_cut:
        global_cut = final_weight
        u_v = (s, next(iter(G[s])))
    return global_cut, u_v
if __name__ == '__main__':
    # build the graph, pick a random start vertex, and search for the
    # global minimum cut
    graph = init_graph()
    start = choice(list(graph.nodes()))
    best_cut, best_pair = stoer_wagner(graph, 99999, ('0', '0'), start)
    print("global min cut", best_cut, "\nnodes:", best_pair)
# NOTE: the code above finds the s-t node pair of the minimum cut and the
# value of the minimum cut. The actual cut edges can then be recovered with
# the max-flow / min-cut technique used below (see "图论:最大流最小割详解").
import copy
from collections import deque,defaultdict
from linecache import getline
"""
order = dict()
def buildNode2Order():
global order
pass
def Node2Order():
pass
"""
def hasPath(Gf, s, t, path):
    """Breadth-first search for an augmenting path in the residual graph.

    Args:
        Gf: residual capacities as a dict of dicts, Gf[u][v] -> capacity.
        s: source node.
        t: sink node.
        path: dict filled in-place with predecessor links (path[v] = u).

    Returns:
        True iff t is reachable from s through positive-capacity edges.
    """
    visited = {node: False for node in Gf}
    visited[s] = True
    queue = deque([s])
    while queue:
        cur = queue.popleft()
        if cur == t:
            return True
        # scan every node: an edge exists iff residual capacity is positive
        for nxt in Gf:
            if not visited[nxt] and Gf[cur].get(nxt, 0) > 0:
                visited[nxt] = True
                path[nxt] = cur  # record predecessor for the flow update
                queue.append(nxt)
    return visited[t]
def max_flow(capDict, s, t):
    """Compute the s-t maximum flow with the Edmonds-Karp algorithm.

    Args:
        capDict: capacities as a dict of dicts, capDict[u][v] -> capacity.
            Left unmodified (the residual graph is a deep copy).
        s: source node.
        t: sink node.

    Returns:
        (maxFlow, nodes): the max-flow value and the nodes reachable from
        s's residual neighbours in the final residual graph (together with
        s these form one side of a minimum cut).
    """
    maxFlow = 0
    Gf = copy.deepcopy(capDict)  # residual graph; keep the caller's dict intact
    path = dict()
    while hasPath(Gf, s, t, path):
        # bottleneck capacity along the augmenting path, walking t -> s
        min_flow = float('inf')
        v = t
        while v != s:
            u = path[v]
            min_flow = min(min_flow, Gf[u][v])
            v = u
        # push the bottleneck: shrink forward edges, grow reverse edges
        v = t
        while v != s:
            u = path[v]
            Gf[u][v] -= min_flow
            # the reverse edge may not exist yet for directed inputs; the
            # original `Gf[v][u] += min_flow` raised KeyError in that case
            Gf[v][u] = Gf[v].get(u, 0) + min_flow
            v = u
        maxFlow += min_flow
    nodes = getNodesSet(Gf, s)
    return maxFlow, nodes
def getNei(cur, resG):
    """Return cur's neighbours with positive residual capacity, in key order."""
    return [node for node in resG if resG[cur].get(node, 0) > 0]
def dfs(cur, visited, resG):
    """Mark every node reachable from cur via positive-capacity edges.

    Iterative (explicit stack) so large graphs cannot hit Python's
    recursion limit. Mutates `visited` in place.
    """
    stack = [cur]
    while stack:
        node = stack.pop()
        if visited[node]:
            continue
        visited[node] = True
        for nei in getNei(node, resG):
            if not visited[nei]:
                stack.append(nei)
def getNodesSet(Gf, s):
    """Collect the nodes reachable from s's residual neighbours.

    After max_flow has saturated the graph these nodes (plus s) appear to
    form the source side of a minimum cut. Gf is only read, so the deep
    copy the original made here was unnecessary.
    """
    visited = {node: False for node in Gf}
    for start in getNei(s, Gf):
        dfs(start, visited, Gf)
    return [node for node in Gf if visited[node]]
def testDictInput(filename="case1.txt", s=0, t=4):
    """Load a unit-capacity edge list and run max_flow on it.

    Args:
        filename: text file with at least two whitespace-separated ints
            ("u v") per line; nodes are expected to have continuous
            indices (default 'case1.txt', the value previously hard-coded).
        s: source node (default 0, matching the original call).
        t: sink node (default 4, matching the original call).
    """
    from collections import defaultdict
    capDict = defaultdict(dict)
    with open(filename, 'r') as f:
        for line in f:
            nums = list(map(int, line.split()))
            fromNode, toNode = nums[0], nums[1]
            # undirected graph: unit capacity in both directions
            capDict[fromNode][toNode] = 1
            capDict[toNode][fromNode] = 1
    print(f"capDict is {capDict}")
    flow, nodes = max_flow(capDict, s, t)
    print(f"flow is {flow}, with nodes of {nodes}")
def initFromNX(G, s, t):
    """Convert a networkx-style graph into the dict-of-dicts capacity map
    that max_flow expects.

    The original body was an empty stub that always returned an empty map;
    it now copies every edge weight symmetrically (default weight 1).

    Args:
        G: graph exposing edges(data=True), e.g. a networkx.Graph.
        s: intended source node (currently unused; kept for API parity).
        t: intended sink node (currently unused; kept for API parity).

    Returns:
        defaultdict(dict) with symmetric capacities capDict[u][v].
    """
    from collections import defaultdict
    capDict = defaultdict(dict)
    for u, v, attrs in G.edges(data=True):
        w = attrs.get('weight', 1)
        capDict[u][v] = w
        capDict[v][u] = w
    return capDict
if __name__ == "__main__":
testDictInput()