POJ 3801 — minimum flow in a network with lower and upper bounds on edge capacities (ISAP-based solution)
/**
POJ 3801 — minimum flow with lower and upper bounds on the edges.

1. Run one max-flow from supersrc to supersink, call it f1. (With the real
   source/sink still open, this first pushes the network toward satisfying
   the mandatory lower-bound edges.)
2. Add an edge sink -> src with capacity INF; record its index as p.
   (This closes the graph into a circulation network with no source/sink.)
3. Run max-flow from supersrc to supersink again, call it f2, and check
   feasibility here. (Feasibility can only be judged on the circulation
   network, hence the second max-flow.)

The minimum flow of the network is then the flow carried by sink -> src.
*/
10:
#include <iostream>
#include <cmath>
#include <memory>
#include <string.h>
#include <cstdio>
#include <vector>
using namespace std;

#define V 150           // max number of vertices
#define E (V*V)         // max number of edges (dense upper bound)
#define INF 0x3F3F3F3F  // 1061109567; "safe infinity" (INF+INF does not overflow int)

// Shared loop indices, legacy contest style; ISAP below no longer depends on them.
int i,j,k;
#define REP(i,n) for((i)=0;(i)<(int)(n);(i)++)
// Iterate a container with a GCC __typeof-deduced iterator (pre-C++11 'auto').
#define snuke(c,itr) for(__typeof((c).begin()) itr=(c).begin();itr!=(c).end();itr++)
26:
27: struct MaxFlow
28: {
29: struct Edge
30: {
31: int v, w, next; //w for capicity
32: int lb,up;
33: } edge[E];
34:
35: int head[V]; // head[u]表示顶点u第一条邻接边的序号, 若head[u] = -1, u没有邻接边
36: int e; // the index of the edge
37: int src, sink;
38: int net[V]; // 流入此节点的流的下界和 - 流出此节点的流的下界和,对于带上下界的来进行使用
39:
40:
41: void addedge(int u, int v, int w, int lb = 0, int up = INF, int rw = 0)
42: {
43: edge[e].v = v;
44: edge[e].w= w;
45: edge[e].next = head[u];
46: edge[e].lb = lb, edge[e].up = up;
47: head[u] = e++;
48: // reverse edge v -> u
49: edge[e].v = u;
50: edge[e].w = rw;
51: edge[e].lb = lb, edge[e].up = up;
52: edge[e].next = head[v];
53: head[v] = e++;
54: }
55:
56: int ISAP(int VertexNum )
57: {
58: int u, v, max_flow, aug, min_lev;
59: int curedge[V], parent[V], level[V];
60: int count[V], augment[V];
61:
62: memset(level, 0, sizeof(level));
63: memset(count, 0, sizeof(count));
64: REP(i,VertexNum+1) curedge[i] = head[i];
65: max_flow = 0;
66: augment[src] = INF;
67: parent[src] = -1;
68: u = src;
69:
70: while (level[src] < VertexNum)
71: {
72: if (u == sink)
73: {
74: max_flow += augment[sink];
75: aug = augment[sink];
76: for (v = parent[sink]; v != -1; v = parent[v])
77: {
78: i = curedge[v];
79: edge[i].w -= aug;
80: edge[i^1].w += aug;
81: augment[edge[i].v] -= aug;
82: if (edge[i].w == 0) u = v;
83: }
84: }
85: for (i = curedge[u]; i != -1; i = edge[i].next)
86: {
87: v = edge[i].v;
88: if (edge[i].w > 0 && level[u] == (level[v]+1))
89: {
90: augment[v] = min(augment[u], edge[i].w);
91: curedge[u] = i;
92: parent[v] = u;
93: u = v;
94: break;
95: }
96: }
97: if (i == -1)
98: {
99: if (--count[level[u]] == 0) break;
100: curedge[u] = head[u];
101: min_lev = VertexNum;
102: for (i = head[u]; i != -1; i = edge[i].next)
103: if (edge[i].w > 0)
104: min_lev = min(level[edge[i].v], min_lev);
105: level[u] = min_lev + 1;
106: count[level[u]]++;
107: if (u != src ) u = parent[u];
108: }
109: }
110: return max_flow;
111: }
112: void solve()
113: {
114: int n, m;
115: while (scanf("%d %d", &n, &m) != EOF)
116: {
117: if (n+m == 0) break;
118: e = 0;
119: memset(head, -1, sizeof(head));
120: memset(net, 0, sizeof(net));
121:
122: int s = n+1, t = n+2;
123: src = http://www.mamicode.com/n+3, sink = n+4;
124: char a[5], b[5];
125: int c;
126: while (m--)
127: {
128: scanf("%s %s %d", a, b, &c);
129: int u, v;
130: if (a[0] == ‘+‘) u = s;
131: else sscanf(a, "%d", &u); // 注意这里读取信息
132: if (b[0] == ‘-‘) v = t;
133: else sscanf(b, "%d", &v);
134: net[v] += c, net[u] -= c;
135: addedge(u,v,INF,c, INF);
136: }
137: vector<int> CE;
138: for(int i=1; i<=n+2; i++)
139: {
140: if(net[i] >=0)
141: {
142: CE.push_back(e);
143: addedge(src, i, net[i]);
144: }
145: else
146: {
147: CE.push_back(e);
148: addedge(i, sink, -net[i]);
149: }
150: }
151: int flow = 0;
152: flow = ISAP(n+4);
153: int p = e;
154: addedge(t, s, INF, 0, INF);
155: flow += ISAP(n+4);
156: bool flag = true;
157: for(int i= 0; i< CE.size(); i++)
158: {
159: if(edge[CE[i]].w !=0)
160: {
161: flag = false;
162: break;
163: }
164: }
165: if (!flag)
166: printf("impossible\n");
167: else
168: printf("%d\n", edge[p^1].w);
169: }
170: }
171: } sap;
172:
173: int main()
174: {
175: // freopen("1.txt","r",stdin);
176: sap.solve();
177: return 0;
178: }
声明:以上内容来自用户投稿及互联网公开渠道收集整理发布,本网站不拥有所有权,未作人工编辑处理,也不承担相关法律责任,若内容有误或涉及侵权可进行投诉: 投诉/举报 工作人员会在5个工作日内联系你,一经查实,本站将立刻删除涉嫌侵权内容。