You are given a string s containing lowercase letters and an integer k. You need to:
- First, change some characters of s to other lowercase English letters.
- Then divide s into k non-empty disjoint substrings such that each substring is a palindrome.
Return the minimal number of characters that you need to change to divide the string.
Example 1:
Input: s = "abc", k = 2
Output: 1
Explanation: You can split the string into "ab" and "c", and change 1 character in "ab" to make it a palindrome.
Example 2:
Input: s = "aabbc", k = 3
Output: 0
Explanation: You can split the string into "aa", "bb" and "c", all of which are palindromes.
Example 3:
Input: s = "leetcode", k = 8
Output: 0
Constraints:
1 <= k <= s.length <= 100
s only contains lowercase English letters.
Related Topics:
Dynamic Programming
Solution 1: DP
Let dp[k][i][j] be the minimal number of changes needed to split s[i..j] into k palindromic substrings.
dp[1][i][i] = 0
dp[1][i][j] = dp[1][i + 1][j - 1]       if s[i] == s[j]
            = 1 + dp[1][i + 1][j - 1]   if s[i] != s[j]
dp[k][i][j] = min( dp[k-1][i][t] + dp[1][t+1][j] | i + k - 2 <= t < j )
The lower bound t >= i + k - 2 guarantees that s[i..t] has at least k - 1 characters, so it can still be split into k - 1 non-empty parts; it matches the innermost loop in the code below.
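For example, with s = "abc" and k = 2 (Example 1): dp[1][0][0] = dp[1][2][2] = 0 and dp[1][0][1] = dp[1][1][2] = 1, so dp[2][0][2] = min(dp[1][0][0] + dp[1][1][2], dp[1][0][1] + dp[1][2][2]) = min(0 + 1, 1 + 0) = 1, matching the expected output.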
// OJ: https://leetcode.com/problems/palindrome-partitioning-iii/
// Author: github.com/lzl124631x
// Time: O(K * N^3)
// Space: O(K * N^2)
class Solution {
typedef long long LL;
inline void setMin(LL &a, LL b) { a = min(a, b); }
public:
int palindromePartition(string s, int K) {
int N = s.size();
vector<vector<vector<LL>>> dp(K + 1, vector<vector<LL>>(N, vector<LL>(N, 1e9)));
for (int i = 0; i < N; ++i) dp[1][i][i] = 0;
for (int i = N - 2; i >= 0; --i) {
for (int j = i + 1; j < N; ++j) {
dp[1][i][j] = (s[i] != s[j]) + (i + 1 <= j - 1 ? dp[1][i + 1][j - 1] : 0);
}
}
for (int k = 2; k <= K; ++k) {
for (int i = N - 2; i >= 0; --i) {
for (int j = i + k - 1; j < N; ++j) {
for (int t = i + k - 2; t < j; ++t) {
setMin(dp[k][i][j], dp[k - 1][i][t] + dp[1][t + 1][j]);
}
}
}
}
return dp[K][0][N - 1];
}
};
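As a quick sanity check, the class above can be run against the three examples. The small driver below is only an illustration and is not part of the LeetCode submission; it assumes the usual LeetCode headers (<string>, <vector>, <algorithm>) and using namespace std are already in scope for the class.
#include <cassert>
int main() {
    // Hypothetical driver: verifies the Solution 1 class against the examples above.
    Solution sol;
    assert(sol.palindromePartition("abc", 2) == 1);      // Example 1
    assert(sol.palindromePartition("aabbc", 3) == 0);    // Example 2
    assert(sol.palindromePartition("leetcode", 8) == 0); // Example 3
}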
Solution 2: DP
In Solution 1, for the k >= 2 cases we don't need to iterate over all i, j combinations: when we peel the last palindromic part off the end, the piece that remains to be split is always a prefix of s starting at index 0, so only its right endpoint matters.
Let pal[i][j] be the minimum number of changes needed to make s[i..j] a palindrome.
Let dp[k][i] be the minimum number of changes needed to split the prefix s[0..i] into k palindromic substrings.
pal[i][i] = 0
pal[i][j] = pal[i + 1][j - 1]       if s[i] == s[j]
          = 1 + pal[i + 1][j - 1]   if s[i] != s[j]
dp[1][i] = pal[0][i]
dp[k][i] = min( dp[k-1][j] + pal[j+1][i] | k-2 <= j < i )
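Using Example 1 again (s = "abc", k = 2): pal[0][1] = pal[1][2] = 1 and pal[2][2] = 0, so dp[1][0] = pal[0][0] = 0, dp[1][1] = pal[0][1] = 1, and dp[2][2] = min(dp[1][0] + pal[1][2], dp[1][1] + pal[2][2]) = min(0 + 1, 1 + 0) = 1, the same answer as before.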
// OJ: https://leetcode.com/problems/palindrome-partitioning-iii/
// Author: github.com/lzl124631x
// Time: O(K * N^2)
// Space: O(N^2 + K * N)
class Solution {
typedef long long LL;
inline void setMin(LL &a, LL b) { a = min(a, b); }
public:
int palindromePartition(string s, int K) {
int N = s.size();
vector<vector<LL>> pal(N, vector<LL>(N));
vector<vector<LL>> dp(K + 1, vector<LL>(N, 1e9));
for (int i = N - 2; i >= 0; --i) {
for (int j = i + 1; j < N; ++j) {
pal[i][j] = (s[i] != s[j]) + pal[i + 1][j - 1];
}
}
for (int i = 0; i < N; ++i) dp[1][i] = pal[0][i];
for (int k = 2; k <= K; ++k) {
for (int i = k - 1; i < N; ++i) {
for (int j = k - 2; j < i; ++j) {
setMin(dp[k][i], dp[k - 1][j] + pal[j + 1][i]);
}
}
}
return dp[K][N - 1];
}
};
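The same kind of hypothetical check works for this version, since the public interface is unchanged; it again assumes the standard LeetCode headers and using namespace std are available, and it additionally covers the smallest allowed input.
#include <cassert>
int main() {
    // Hypothetical driver: verifies the Solution 2 class against the examples above.
    Solution sol;
    assert(sol.palindromePartition("abc", 2) == 1);      // Example 1
    assert(sol.palindromePartition("aabbc", 3) == 0);    // Example 2
    assert(sol.palindromePartition("leetcode", 8) == 0); // Example 3: k == s.length
    assert(sol.palindromePartition("a", 1) == 0);        // smallest allowed input
}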