#include <bits/stdc++.h>
using namespace std;
const int maxn = 2e5+5;
// Idea: keep two segment trees over the seat string — one mapping 'L'->1 /
// 'R'->0, the other with the opposite mapping.  The problem then reduces to
// finding the longest run of consecutive 1s in each tree.
struct Node{
int Lsum;// length of the run of 1s starting at the segment's left edge
int Rsum;// length of the run of 1s ending at the segment's right edge
int val; // longest run of 1s anywhere inside the segment
}wL[maxn*4],wR[maxn*4];
void pushupL(int u,int L,int R)
{
Node left = wL[2*u],right = wL[2*u+1];
int M = L + R >> 1;
wL[u].Lsum = (left.val==(M-L+1)) ? left.val + right.Lsum : left.Lsum;
wL[u].Rsum = (right.val==(R-M)) ? right.val + left.Rsum : right.Rsum;
wL[u].val = max(max(left.val,right.val),left.Rsum+right.Lsum);
}
// Flip the bit at position k in the L-tree (0 <-> 1) and repair the
// aggregates along the root-to-leaf path.  Node u covers [L, R].
void updateL(int u,int L,int R,int k)
{
    if (L == R) {
        // Leaf: all three aggregates equal the (now toggled) bit.
        const int bit = wL[u].val ? 0 : 1;
        wL[u].Lsum = wL[u].Rsum = wL[u].val = bit;
        return;
    }
    const int mid = (L + R) >> 1;
    if (k > mid)
        updateL(2*u+1, mid+1, R, k);
    else
        updateL(2*u, L, mid, k);
    pushupL(u, L, R);
}
// True when the node span [L, R] lies entirely inside the query window [l, r].
bool InRange(int L,int R,int l,int r)
{
    return l <= L && R <= r;
}
// True when the node span [L, R] and the query window [l, r] are disjoint.
bool OutofRange(int L,int R,int l,int r)
{
    return r < L || R < l;
}
// Aggregate Node for the part of [l, r] covered by node u (spanning [L, R])
// in the L-tree; disjoint spans contribute an all-zero Node.
// In this program the query is only ever issued over the full range
// [1, n] (see main), so the root returns wL[1] immediately.
Node queryL(int u,int L,int R,int l,int r) {
    if (OutofRange(L, R, l, r))
        return Node{0, 0, 0};
    if (InRange(L, R, l, r))
        return wL[u];
    const int mid = (L + R) >> 1;
    const Node a = queryL(2*u, L, mid, l, r);
    const Node b = queryL(2*u+1, mid+1, R, l, r);
    // Same merge as pushupL, applied to the two partial answers.
    Node res;
    res.Lsum = (a.val == mid - L + 1) ? a.val + b.Lsum : a.Lsum;
    res.Rsum = (b.val == R - mid) ? b.val + a.Rsum : b.Rsum;
    res.val  = max(a.Rsum + b.Lsum, max(a.val, b.val));
    return res;
}
void pushupR(int u,int L,int R)
{
Node left = wR[2*u],right = wR[2*u+1];
int M = L + R >> 1;
wR[u].Lsum = (left.val==(M-L+1)) ? left.val + right.Lsum : left.Lsum;
wR[u].Rsum = (right.val==(R-M)) ? right.val + left.Rsum : right.Rsum;
wR[u].val = max(max(left.val,right.val),left.Rsum+right.Lsum);
}
// Flip the bit at position k in the R-tree (0 <-> 1) and repair the
// aggregates along the root-to-leaf path.  Node u covers [L, R].
void updateR(int u,int L,int R,int k)
{
    if (L == R) {
        // Leaf: all three aggregates equal the (now toggled) bit.
        const int bit = wR[u].val ? 0 : 1;
        wR[u].Lsum = wR[u].Rsum = wR[u].val = bit;
        return;
    }
    const int mid = (L + R) >> 1;
    if (k > mid)
        updateR(2*u+1, mid+1, R, k);
    else
        updateR(2*u, L, mid, k);
    pushupR(u, L, R);
}
// Aggregate Node for the part of [l, r] covered by node u (spanning [L, R])
// in the R-tree; disjoint spans contribute an all-zero Node.
// In this program the query is only ever issued over the full range
// [1, n] (see main), so the root returns wR[1] immediately.
Node queryR(int u,int L,int R,int l,int r) {
    if (OutofRange(L, R, l, r))
        return Node{0, 0, 0};
    if (InRange(L, R, l, r))
        return wR[u];
    const int mid = (L + R) >> 1;
    const Node a = queryR(2*u, L, mid, l, r);
    const Node b = queryR(2*u+1, mid+1, R, l, r);
    // Same merge as pushupR, applied to the two partial answers.
    Node res;
    res.Lsum = (a.val == mid - L + 1) ? a.val + b.Lsum : a.Lsum;
    res.Rsum = (b.val == R - mid) ? b.val + a.Rsum : b.Rsum;
    res.val  = max(a.Rsum + b.Lsum, max(a.val, b.val));
    return res;
}
int main()
{
int n,q;
cin >> n >> q;
for(int i = 1; i <= n; i++)
{
updateL(1,1,n,i); // 初始全为L
}
for(int i = 1; i <= q; i++)
{
int x;
cin >> x;
updateL(1,1,n,x);
updateR(1,1,n,x);
cout << max(queryL(1,1,n,1,n).val,queryR(1,1,n,1,n).val) << endl;
}
}
// Summary: after each toggle this program reports the length of the longest
// run of consecutive equal letters ('L' or 'R').  Initially the L-tree is
// all 1s and the R-tree is all 0s.